[ 475.482892] env[62096]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=62096) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}} [ 475.483239] env[62096]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=62096) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}} [ 475.483328] env[62096]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=62096) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}} [ 475.483604] env[62096]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs [ 475.567747] env[62096]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=62096) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}} [ 475.577101] env[62096]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.009s {{(pid=62096) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}} [ 475.621606] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-60573931-f6f4-4cba-ae13-c063738be75f None None] Creating reply queue: reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 475.630074] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-60573931-f6f4-4cba-ae13-c063738be75f None None] Expecting reply to msg 142e057cbc7545668068d05cc6ca8fc2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 475.643688] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 142e057cbc7545668068d05cc6ca8fc2 [ 476.180279] env[62096]: INFO nova.virt.driver [None req-60573931-f6f4-4cba-ae13-c063738be75f None None] Loading compute driver 'vmwareapi.VMwareVCDriver' [ 476.250985] env[62096]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 476.251193] env[62096]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 476.251250] env[62096]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=62096) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}} [ 479.361563] env[62096]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-d326427d-91b2-4896-9dce-ef97541641f8 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 479.378067] env[62096]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=62096) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}} [ 479.378228] env[62096]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-25c30da5-e335-449d-a07a-d0028a7085a2 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 479.404638] env[62096]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 7157d. 
[ 479.404818] env[62096]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.154s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 479.405372] env[62096]: INFO nova.virt.vmwareapi.driver [None req-60573931-f6f4-4cba-ae13-c063738be75f None None] VMware vCenter version: 7.0.3 [ 479.408740] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7486459b-7a86-4248-9763-de21ed062773 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 479.425754] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65facd48-f002-4d8e-bbca-c76babae9017 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 479.431766] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7af72a91-586d-4825-a230-6b2775332713 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 479.438388] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a049f361-6b25-454c-95df-bd0db2a57170 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 479.451230] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b3690c4-7817-4892-9ec9-6989c703dce1 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 479.457124] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db0b7264-af62-4487-8c10-8e3dff5818e0 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 479.487056] env[62096]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-12526ce4-b8f6-41de-8ec1-ee400f2469db {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 479.492261] env[62096]: DEBUG nova.virt.vmwareapi.driver [None req-60573931-f6f4-4cba-ae13-c063738be75f None None] Extension org.openstack.compute already exists. {{(pid=62096) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:224}} [ 479.495032] env[62096]: INFO nova.compute.provider_config [None req-60573931-f6f4-4cba-ae13-c063738be75f None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access. 
[ 479.495705] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-60573931-f6f4-4cba-ae13-c063738be75f None None] Expecting reply to msg b827f32be73c484bbe5538a0300887ee in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 479.512365] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b827f32be73c484bbe5538a0300887ee [ 479.998260] env[62096]: DEBUG nova.context [None req-60573931-f6f4-4cba-ae13-c063738be75f None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),6ac90b00-a604-49f8-9a52-4b0e3d4560ab(cell1) {{(pid=62096) load_cells /opt/stack/nova/nova/context.py:464}} [ 480.000293] env[62096]: DEBUG oslo_concurrency.lockutils [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 480.000525] env[62096]: DEBUG oslo_concurrency.lockutils [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 480.001193] env[62096]: DEBUG oslo_concurrency.lockutils [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 480.001611] env[62096]: DEBUG oslo_concurrency.lockutils [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] Acquiring lock "6ac90b00-a604-49f8-9a52-4b0e3d4560ab" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 480.001833] env[62096]: DEBUG oslo_concurrency.lockutils [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] Lock "6ac90b00-a604-49f8-9a52-4b0e3d4560ab" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 480.002856] env[62096]: DEBUG oslo_concurrency.lockutils [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] Lock "6ac90b00-a604-49f8-9a52-4b0e3d4560ab" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 480.023379] env[62096]: INFO dbcounter [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] Registered counter for database nova_cell0 [ 480.031494] env[62096]: INFO dbcounter [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] Registered counter for database nova_cell1 [ 480.034701] env[62096]: DEBUG oslo_db.sqlalchemy.engines [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62096) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}} [ 480.035061] env[62096]: 
DEBUG oslo_db.sqlalchemy.engines [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62096) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}} [ 480.039881] env[62096]: ERROR nova.db.main.api [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 480.039881] env[62096]: result = function(*args, **kwargs) [ 480.039881] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 480.039881] env[62096]: return func(*args, **kwargs) [ 480.039881] env[62096]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result [ 480.039881] env[62096]: result = fn(*args, **kwargs) [ 480.039881] env[62096]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper [ 480.039881] env[62096]: return f(*args, **kwargs) [ 480.039881] env[62096]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version [ 480.039881] env[62096]: return db.service_get_minimum_version(context, binaries) [ 480.039881] env[62096]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper [ 480.039881] env[62096]: _check_db_access() [ 480.039881] env[62096]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access [ 480.039881] env[62096]: stacktrace = ''.join(traceback.format_stack()) [ 480.039881] env[62096]: [ 480.040673] env[62096]: ERROR nova.db.main.api [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 480.040673] env[62096]: result = function(*args, **kwargs) [ 480.040673] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 480.040673] env[62096]: return func(*args, **kwargs) [ 480.040673] env[62096]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result [ 480.040673] env[62096]: result = fn(*args, **kwargs) [ 480.040673] env[62096]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper [ 480.040673] env[62096]: return f(*args, **kwargs) [ 480.040673] env[62096]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version [ 480.040673] env[62096]: return db.service_get_minimum_version(context, binaries) [ 480.040673] env[62096]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper [ 480.040673] env[62096]: _check_db_access() [ 480.040673] env[62096]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access [ 480.040673] env[62096]: stacktrace = ''.join(traceback.format_stack()) [ 480.040673] env[62096]: [ 480.041205] env[62096]: WARNING nova.objects.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] Failed to get minimum service version for cell 6ac90b00-a604-49f8-9a52-4b0e3d4560ab [ 480.041205] env[62096]: WARNING nova.objects.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000 [ 480.041598] env[62096]: DEBUG oslo_concurrency.lockutils [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] Acquiring lock "singleton_lock" {{(pid=62096) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 480.041777] env[62096]: DEBUG oslo_concurrency.lockutils [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] Acquired lock "singleton_lock" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 480.042033] env[62096]: DEBUG oslo_concurrency.lockutils [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] Releasing lock "singleton_lock" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 480.042345] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] Full set of CONF: {{(pid=62096) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}} [ 480.042487] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] ******************************************************************************** {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2600}} [ 480.042613] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] Configuration options gathered from: {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2601}} [ 480.042746] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 480.042930] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2603}} [ 480.043058] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] ================================================================================ {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2605}} [ 480.043260] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] allow_resize_to_same_host = True {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.043425] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] arq_binding_timeout = 300 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.043553] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] backdoor_port = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.043677] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] backdoor_socket = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.043863] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] block_device_allocate_retries = 60 {{(pid=62096) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.044132] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] block_device_allocate_retries_interval = 3 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.044320] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cert = self.pem {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.044487] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.044655] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] compute_monitors = [] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.044823] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] config_dir = [] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.044986] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] config_drive_format = iso9660 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.045123] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.045286] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] config_source = [] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.045447] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] console_host = devstack {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.045606] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] control_exchange = nova {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.045759] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cpu_allocation_ratio = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.045913] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] daemon = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.046078] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] debug = True {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.046230] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] default_access_ip_network_name = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.046391] 
env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] default_availability_zone = nova {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.046544] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] default_ephemeral_format = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.046701] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] default_green_pool_size = 1000 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.046953] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.047133] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] default_schedule_zone = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.047288] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] disk_allocation_ratio = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.047447] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] enable_new_services = True {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.047622] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] enabled_apis = ['osapi_compute'] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.047781] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] enabled_ssl_apis = [] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.047939] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] flat_injected = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.048109] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] force_config_drive = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.048266] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] force_raw_images = True {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.048429] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 
None None] graceful_shutdown_timeout = 5 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.048585] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] heal_instance_info_cache_interval = 60 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.048791] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] host = cpu-1 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.048957] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.049115] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] initial_disk_allocation_ratio = 1.0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.049269] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] initial_ram_allocation_ratio = 1.0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.049473] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.049631] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] instance_build_timeout = 0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.049789] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] instance_delete_interval = 300 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.049964] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] instance_format = [instance: %(uuid)s] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.050144] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] instance_name_template = instance-%08x {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.050304] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] instance_usage_audit = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.050470] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] instance_usage_audit_period = month {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.050630] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.050795] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] 
instances_path = /opt/stack/data/nova/instances {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.050958] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] internal_service_availability_zone = internal {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.051107] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] key = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.051263] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] live_migration_retry_count = 30 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.051425] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] log_color = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.051585] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] log_config_append = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.051777] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.051949] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] log_dir = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.052150] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] log_file = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.052282] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] log_options = True {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.052444] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] log_rotate_interval = 1 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.052611] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] log_rotate_interval_type = days {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.052775] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] log_rotation_type = none {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.052902] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.053052] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] 
logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.053236] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.053404] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.053529] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.053691] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] long_rpc_timeout = 1800 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.053877] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] max_concurrent_builds = 10 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.054048] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] max_concurrent_live_migrations = 1 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.054207] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] max_concurrent_snapshots = 5 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.054366] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] max_local_block_devices = 3 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.054524] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] max_logfile_count = 30 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.054678] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] max_logfile_size_mb = 200 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.054837] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] maximum_instance_delete_attempts = 5 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.055001] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] metadata_listen = 0.0.0.0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.055162] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] metadata_listen_port = 8775 {{(pid=62096) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.055327] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] metadata_workers = 2 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.055486] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] migrate_max_retries = -1 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.055647] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] mkisofs_cmd = genisoimage {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.055850] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] my_block_storage_ip = 10.180.1.21 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.055984] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] my_ip = 10.180.1.21 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.056186] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] network_allocate_retries = 0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.056367] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.056531] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] osapi_compute_listen = 0.0.0.0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.056689] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] osapi_compute_listen_port = 8774 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.056851] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] osapi_compute_unique_server_name_scope = {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.057016] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] osapi_compute_workers = 2 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.057176] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] password_length = 12 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.057333] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] periodic_enable = True {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.057489] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] periodic_fuzzy_delay = 60 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.057652] env[62096]: DEBUG 
oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] pointer_model = usbtablet {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.057813] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] preallocate_images = none {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.057971] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] publish_errors = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.058097] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] pybasedir = /opt/stack/nova {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.058258] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] ram_allocation_ratio = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.058416] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] rate_limit_burst = 0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.058579] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] rate_limit_except_level = CRITICAL {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.058736] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] rate_limit_interval = 0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.058893] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] reboot_timeout = 0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.059064] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] reclaim_instance_interval = 0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.059230] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] record = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.059396] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] reimage_timeout_per_gb = 60 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.059558] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] report_interval = 120 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.059715] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] rescue_timeout = 0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.059869] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] reserved_host_cpus = 0 {{(pid=62096) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.060075] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] reserved_host_disk_mb = 0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.060196] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] reserved_host_memory_mb = 512 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.060355] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] reserved_huge_pages = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.060513] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] resize_confirm_window = 0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.060669] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] resize_fs_using_block_device = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.060827] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] resume_guests_state_on_host_boot = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.060991] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.061149] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] rpc_response_timeout = 60 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.061306] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] run_external_periodic_tasks = True {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.061468] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] running_deleted_instance_action = reap {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.061622] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] running_deleted_instance_poll_interval = 1800 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.061803] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] running_deleted_instance_timeout = 0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.062004] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] scheduler_instance_sync_interval = 120 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.062214] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] service_down_time = 720 {{(pid=62096) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.062388] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] servicegroup_driver = db {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.062550] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] shelved_offload_time = 0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.062706] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] shelved_poll_interval = 3600 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.062872] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] shutdown_timeout = 0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.063031] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] source_is_ipv6 = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.063187] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] ssl_only = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.063423] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.063589] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] sync_power_state_interval = 600 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.063748] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] sync_power_state_pool_size = 1000 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.063939] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] syslog_log_facility = LOG_USER {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.064113] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] tempdir = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.064276] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] timeout_nbd = 10 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.064440] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] transport_url = **** {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.064598] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] update_resources_interval = 0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.064753] env[62096]: DEBUG oslo_service.service [None 
req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] use_cow_images = True {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.064943] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] use_eventlog = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.065105] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] use_journal = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.065262] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] use_json = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.065417] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] use_rootwrap_daemon = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.065572] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] use_stderr = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.065725] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] use_syslog = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.065879] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vcpu_pin_set = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.066043] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vif_plugging_is_fatal = True {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.066208] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vif_plugging_timeout = 300 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.066369] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] virt_mkfs = [] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.066525] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] volume_usage_poll_interval = 0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.066684] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] watch_log_file = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.066842] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] web = /usr/share/spice-html5 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 480.067066] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_concurrency.disable_process_locking = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
480.067355] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.067534] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.067699] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.067867] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.068056] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.068213] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.068392] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] api.auth_strategy = keystone {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.068558] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] api.compute_link_prefix = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.068728] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.068898] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] api.dhcp_domain = novalocal {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.069062] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] api.enable_instance_password = True {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.069224] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] api.glance_link_prefix = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.069389] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.069557] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None 
None] api.instance_list_cells_batch_strategy = distributed {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.069716] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] api.instance_list_per_project_cells = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.069875] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] api.list_records_by_skipping_down_cells = True {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.070057] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] api.local_metadata_per_cell = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.070239] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] api.max_limit = 1000 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.070406] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] api.metadata_cache_expiration = 15 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.070578] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] api.neutron_default_tenant_id = default {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.070746] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] api.response_validation = warn {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.070917] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] api.use_neutron_default_nets = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.071088] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.071248] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.071413] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.071584] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.071775] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] api.vendordata_dynamic_targets = [] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.071948] env[62096]: DEBUG oslo_service.service [None 
req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] api.vendordata_jsonfile_path = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.072147] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.072341] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cache.backend = dogpile.cache.memcached {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.072507] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cache.backend_argument = **** {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.072674] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cache.config_prefix = cache.oslo {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.072842] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cache.dead_timeout = 60.0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.073015] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cache.debug_cache_backend = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.073196] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cache.enable_retry_client = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.073359] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cache.enable_socket_keepalive = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.073530] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cache.enabled = True {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.073692] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cache.enforce_fips_mode = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.073886] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cache.expiration_time = 600 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.074061] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cache.hashclient_retry_attempts = 2 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.074226] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cache.hashclient_retry_delay = 1.0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.074387] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] 
cache.memcache_dead_retry = 300 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.074541] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cache.memcache_password = **** {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.074704] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.074867] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.075032] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cache.memcache_pool_maxsize = 10 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.075195] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.075355] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cache.memcache_sasl_enabled = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.075530] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.075696] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cache.memcache_socket_timeout = 1.0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.075855] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cache.memcache_username = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.076038] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cache.proxies = [] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.076233] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cache.redis_db = 0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.076396] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cache.redis_password = **** {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.076590] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cache.redis_sentinel_service_name = mymaster {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.076770] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cache.redis_sentinels = 
['localhost:26379'] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.076937] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cache.redis_server = localhost:6379 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.077102] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cache.redis_socket_timeout = 1.0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.077261] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cache.redis_username = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.077421] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cache.retry_attempts = 2 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.077582] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cache.retry_delay = 0.0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.077741] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cache.socket_keepalive_count = 1 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.077902] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cache.socket_keepalive_idle = 1 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.078060] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cache.socket_keepalive_interval = 1 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.078215] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cache.tls_allowed_ciphers = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.078368] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cache.tls_cafile = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.078521] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cache.tls_certfile = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.078678] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cache.tls_enabled = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.078830] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cache.tls_keyfile = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.078998] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cinder.auth_section = None {{(pid=62096) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.079190] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cinder.auth_type = password {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.079356] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cinder.cafile = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.079527] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cinder.catalog_info = volumev3::publicURL {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.079685] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cinder.certfile = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.079845] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cinder.collect_timing = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.080013] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cinder.cross_az_attach = True {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.080185] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cinder.debug = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.080344] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cinder.endpoint_template = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.080505] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cinder.http_retries = 3 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.080665] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cinder.insecure = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.080822] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cinder.keyfile = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.080992] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cinder.os_region_name = RegionOne {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.081153] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cinder.split_loggers = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.081310] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cinder.timeout = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.081477] env[62096]: DEBUG oslo_service.service [None 
req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.081634] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] compute.cpu_dedicated_set = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.081818] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] compute.cpu_shared_set = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.081992] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] compute.image_type_exclude_list = [] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.082177] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.082352] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] compute.max_concurrent_disk_ops = 0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.082517] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] compute.max_disk_devices_to_attach = -1 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.082679] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.082849] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.083013] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] compute.resource_provider_association_refresh = 300 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.083174] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] compute.shutdown_retry_interval = 10 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.083351] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.083529] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] conductor.workers = 2 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.083703] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] console.allowed_origins = [] {{(pid=62096) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.083897] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] console.ssl_ciphers = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.084087] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] console.ssl_minimum_version = default {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.084261] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] consoleauth.enforce_session_timeout = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.084431] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] consoleauth.token_ttl = 600 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.084600] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cyborg.cafile = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.084759] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cyborg.certfile = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.084951] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cyborg.collect_timing = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.085117] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cyborg.connect_retries = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.085275] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cyborg.connect_retry_delay = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.085430] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cyborg.endpoint_override = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.085588] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cyborg.insecure = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.085742] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cyborg.keyfile = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.085899] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cyborg.max_version = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.086055] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cyborg.min_version = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.086207] env[62096]: DEBUG 
oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cyborg.region_name = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.086362] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cyborg.retriable_status_codes = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.086515] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cyborg.service_name = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.086681] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cyborg.service_type = accelerator {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.086838] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cyborg.split_loggers = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.087055] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cyborg.status_code_retries = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.087237] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cyborg.status_code_retry_delay = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.087398] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cyborg.timeout = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.087577] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.087738] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] cyborg.version = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.087942] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] database.backend = sqlalchemy {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.088137] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] database.connection = **** {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.088310] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] database.connection_debug = 0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.088477] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] database.connection_parameters = {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.088643] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] 
database.connection_recycle_time = 3600 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.088806] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] database.connection_trace = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.088967] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] database.db_inc_retry_interval = True {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.089127] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] database.db_max_retries = 20 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.089288] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] database.db_max_retry_interval = 10 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.089450] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] database.db_retry_interval = 1 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.089611] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] database.max_overflow = 50 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.089770] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] database.max_pool_size = 5 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.089929] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] database.max_retries = 10 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.090095] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.090253] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] database.mysql_wsrep_sync_wait = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.090410] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] database.pool_timeout = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.090570] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] database.retry_interval = 10 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.090724] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] database.slave_connection = **** {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.090899] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] database.sqlite_synchronous = True {{(pid=62096) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.091076] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] database.use_db_reconnect = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.091253] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] api_database.backend = sqlalchemy {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.091417] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] api_database.connection = **** {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.091579] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] api_database.connection_debug = 0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.091765] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] api_database.connection_parameters = {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.091940] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] api_database.connection_recycle_time = 3600 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.092118] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] api_database.connection_trace = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.092281] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] api_database.db_inc_retry_interval = True {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.092442] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] api_database.db_max_retries = 20 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.092599] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] api_database.db_max_retry_interval = 10 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.092758] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] api_database.db_retry_interval = 1 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.092917] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] api_database.max_overflow = 50 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.093078] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] api_database.max_pool_size = 5 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.093236] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] api_database.max_retries = 10 {{(pid=62096) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.093401] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.093559] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.093714] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] api_database.pool_timeout = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.093918] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] api_database.retry_interval = 10 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.094094] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] api_database.slave_connection = **** {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.094259] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] api_database.sqlite_synchronous = True {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.094435] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] devices.enabled_mdev_types = [] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.094610] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.094813] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] ephemeral_storage_encryption.default_format = luks {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.094939] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] ephemeral_storage_encryption.enabled = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.095104] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.095271] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] glance.api_servers = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.095435] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] glance.cafile = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.095595] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] glance.certfile = None {{(pid=62096) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.095755] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] glance.collect_timing = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.095913] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] glance.connect_retries = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.096083] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] glance.connect_retry_delay = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.096250] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] glance.debug = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.096412] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] glance.default_trusted_certificate_ids = [] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.096570] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] glance.enable_certificate_validation = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.096729] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] glance.enable_rbd_download = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.096885] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] glance.endpoint_override = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.097074] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] glance.insecure = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.097238] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] glance.keyfile = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.097396] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] glance.max_version = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.097549] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] glance.min_version = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.097708] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] glance.num_retries = 3 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.097874] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] glance.rbd_ceph_conf = {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.098035] env[62096]: DEBUG 
oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] glance.rbd_connect_timeout = 5 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.098199] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] glance.rbd_pool = {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.098361] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] glance.rbd_user = {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.098516] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] glance.region_name = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.098673] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] glance.retriable_status_codes = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.098826] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] glance.service_name = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.098990] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] glance.service_type = image {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.099146] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] glance.split_loggers = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.099302] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] glance.status_code_retries = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.099455] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] glance.status_code_retry_delay = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.099609] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] glance.timeout = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.099783] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.099955] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] glance.verify_glance_signatures = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.100153] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] glance.version = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.100324] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] guestfs.debug = False 
{{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.100491] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] mks.enabled = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.100845] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.101035] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] image_cache.manager_interval = 2400 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.101201] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] image_cache.precache_concurrency = 1 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.101366] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] image_cache.remove_unused_base_images = True {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.101532] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.101704] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.101959] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] image_cache.subdirectory_name = _base {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.102218] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] ironic.api_max_retries = 60 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.102482] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] ironic.api_retry_interval = 2 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.102750] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] ironic.auth_section = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.103019] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] ironic.auth_type = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.103276] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] ironic.cafile = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.103543] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] ironic.certfile = None {{(pid=62096) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.103797] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] ironic.collect_timing = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.104053] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] ironic.conductor_group = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.104260] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] ironic.connect_retries = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.104430] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] ironic.connect_retry_delay = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.104592] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] ironic.endpoint_override = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.104755] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] ironic.insecure = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.104917] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] ironic.keyfile = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.105078] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] ironic.max_version = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.105236] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] ironic.min_version = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.105401] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] ironic.peer_list = [] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.105557] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] ironic.region_name = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.105715] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] ironic.retriable_status_codes = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.105878] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] ironic.serial_console_state_timeout = 10 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.106036] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] ironic.service_name = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.106202] env[62096]: DEBUG 
oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] ironic.service_type = baremetal {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.106360] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] ironic.shard = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.106522] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] ironic.split_loggers = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.106679] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] ironic.status_code_retries = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.106836] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] ironic.status_code_retry_delay = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.106995] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] ironic.timeout = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.107175] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.107332] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] ironic.version = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.107514] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.107681] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] key_manager.fixed_key = **** {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.107859] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.108030] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] barbican.barbican_api_version = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.108193] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] barbican.barbican_endpoint = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.108362] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] barbican.barbican_endpoint_type = public {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.108517] env[62096]: DEBUG oslo_service.service [None 
req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] barbican.barbican_region_name = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.108672] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] barbican.cafile = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.108827] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] barbican.certfile = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.108988] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] barbican.collect_timing = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.109142] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] barbican.insecure = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.109295] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] barbican.keyfile = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.109453] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] barbican.number_of_retries = 60 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.109611] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] barbican.retry_delay = 1 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.109769] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] barbican.send_service_user_token = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.109928] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] barbican.split_loggers = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.110084] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] barbican.timeout = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.110241] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] barbican.verify_ssl = True {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.110395] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] barbican.verify_ssl_path = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.110558] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] barbican_service_user.auth_section = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.110716] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] barbican_service_user.auth_type = None {{(pid=62096) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.111234] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] barbican_service_user.cafile = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.111234] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] barbican_service_user.certfile = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.111234] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] barbican_service_user.collect_timing = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.111550] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] barbican_service_user.insecure = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.111550] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] barbican_service_user.keyfile = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.111638] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] barbican_service_user.split_loggers = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.111781] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] barbican_service_user.timeout = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.111956] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vault.approle_role_id = **** {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.112128] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vault.approle_secret_id = **** {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.112298] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vault.kv_mountpoint = secret {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.112452] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vault.kv_path = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.112612] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vault.kv_version = 2 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.112767] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vault.namespace = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.112920] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vault.root_token_id = **** {{(pid=62096) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.113076] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vault.ssl_ca_crt_file = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.113237] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vault.timeout = 60.0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.113394] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vault.use_ssl = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.113558] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.113725] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] keystone.auth_section = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.113915] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] keystone.auth_type = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.114081] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] keystone.cafile = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.114236] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] keystone.certfile = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.114397] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] keystone.collect_timing = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.114556] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] keystone.connect_retries = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.114712] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] keystone.connect_retry_delay = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.114894] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] keystone.endpoint_override = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.115061] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] keystone.insecure = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.115217] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] keystone.keyfile = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.115371] env[62096]: DEBUG oslo_service.service 
[None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] keystone.max_version = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.115522] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] keystone.min_version = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.115674] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] keystone.region_name = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.115830] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] keystone.retriable_status_codes = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.115985] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] keystone.service_name = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.116165] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] keystone.service_type = identity {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.116325] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] keystone.split_loggers = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.116479] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] keystone.status_code_retries = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.116635] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] keystone.status_code_retry_delay = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.116789] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] keystone.timeout = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.116966] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.117122] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] keystone.version = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.117321] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.connection_uri = {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.117480] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.cpu_mode = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.117645] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.cpu_model_extra_flags = 
[] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.117813] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.cpu_models = [] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.117981] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.cpu_power_governor_high = performance {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.118143] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.cpu_power_governor_low = powersave {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.118303] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.cpu_power_management = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.118470] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.118894] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.device_detach_attempts = 8 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.118894] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.device_detach_timeout = 20 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.118983] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.disk_cachemodes = [] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.119082] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.disk_prefix = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.119245] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.enabled_perf_events = [] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.119405] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.file_backed_memory = 0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.119567] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.gid_maps = [] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.119720] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.hw_disk_discard = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.119874] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.hw_machine_type = None {{(pid=62096) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.120109] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.images_rbd_ceph_conf = {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.120219] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.120380] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.120545] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.images_rbd_glance_store_name = {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.120709] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.images_rbd_pool = rbd {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.120875] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.images_type = default {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.121032] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.images_volume_group = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.121189] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.inject_key = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.121348] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.inject_partition = -2 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.121506] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.inject_password = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.121662] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.iscsi_iface = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.121847] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.iser_use_multipath = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.122015] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.live_migration_bandwidth = 0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.122175] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=62096) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.122334] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.live_migration_downtime = 500 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.122493] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.122652] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.122809] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.live_migration_inbound_addr = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.122970] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.123127] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.live_migration_permit_post_copy = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.123282] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.live_migration_scheme = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.123456] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.live_migration_timeout_action = abort {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.123620] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.live_migration_tunnelled = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.123780] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.live_migration_uri = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.123966] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.live_migration_with_native_tls = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.124145] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.max_queues = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.124310] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.124535] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] 
libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.124696] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.nfs_mount_options = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.125345] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.125531] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.125700] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.num_iser_scan_tries = 5 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.125863] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.num_memory_encrypted_guests = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.126029] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.126193] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.num_pcie_ports = 0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.126362] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.num_volume_scan_tries = 5 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.126528] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.pmem_namespaces = [] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.126688] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.quobyte_client_cfg = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.126984] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.127161] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.rbd_connect_timeout = 5 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.127330] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.127496] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 
None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.127656] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.rbd_secret_uuid = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.127815] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.rbd_user = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.127982] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.128171] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.remote_filesystem_transport = ssh {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.128333] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.rescue_image_id = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.128492] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.rescue_kernel_id = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.128649] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.rescue_ramdisk_id = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.128816] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.128977] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.rx_queue_size = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.129143] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.smbfs_mount_options = {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.129421] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.129592] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.snapshot_compression = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.129751] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.snapshot_image_format = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.129967] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] 
libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.130134] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.sparse_logical_volumes = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.130295] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.swtpm_enabled = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.130462] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.swtpm_group = tss {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.130628] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.swtpm_user = tss {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.130795] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.sysinfo_serial = unique {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.130956] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.tb_cache_size = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.131113] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.tx_queue_size = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.131277] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.uid_maps = [] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.131439] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.use_virtio_for_bridges = True {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.131609] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.virt_type = kvm {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.131798] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.volume_clear = zero {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.131969] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.volume_clear_size = 0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.132149] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.volume_use_multipath = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.132309] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.vzstorage_cache_path = None {{(pid=62096) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.132477] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.132644] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.vzstorage_mount_group = qemu {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.132808] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.vzstorage_mount_opts = [] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.132976] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.133247] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.133421] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.vzstorage_mount_user = stack {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.133587] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.133788] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] neutron.auth_section = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.133987] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] neutron.auth_type = password {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.134151] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] neutron.cafile = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.134309] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] neutron.certfile = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.134471] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] neutron.collect_timing = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.134630] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] neutron.connect_retries = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.134795] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] neutron.connect_retry_delay = None {{(pid=62096) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.134984] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] neutron.default_floating_pool = public {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.135184] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] neutron.endpoint_override = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.135304] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] neutron.extension_sync_interval = 600 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.135464] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] neutron.http_retries = 3 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.135624] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] neutron.insecure = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.135782] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] neutron.keyfile = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.135939] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] neutron.max_version = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.136120] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.136280] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] neutron.min_version = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.136443] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] neutron.ovs_bridge = br-int {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.136605] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] neutron.physnets = [] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.136769] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] neutron.region_name = RegionOne {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.136959] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] neutron.retriable_status_codes = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.137138] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] neutron.service_metadata_proxy = True {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.137314] 
env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] neutron.service_name = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.137487] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] neutron.service_type = network {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.137646] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] neutron.split_loggers = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.137805] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] neutron.status_code_retries = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.137963] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] neutron.status_code_retry_delay = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.138117] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] neutron.timeout = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.138294] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.138451] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] neutron.version = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.138622] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] notifications.bdms_in_notifications = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.138795] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] notifications.default_level = INFO {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.138967] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] notifications.notification_format = unversioned {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.139129] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] notifications.notify_on_state_change = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.139302] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.139476] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] pci.alias = [] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.139644] 
env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] pci.device_spec = [] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.139804] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] pci.report_in_placement = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.139974] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] placement.auth_section = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.140159] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] placement.auth_type = password {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.140326] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.140485] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] placement.cafile = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.140638] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] placement.certfile = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.140797] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] placement.collect_timing = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.140956] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] placement.connect_retries = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.141109] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] placement.connect_retry_delay = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.141263] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] placement.default_domain_id = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.141415] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] placement.default_domain_name = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.141565] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] placement.domain_id = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.141734] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] placement.domain_name = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.141907] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 
None None] placement.endpoint_override = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.142068] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] placement.insecure = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.142221] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] placement.keyfile = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.142373] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] placement.max_version = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.142524] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] placement.min_version = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.142687] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] placement.password = **** {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.142841] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] placement.project_domain_id = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.143003] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] placement.project_domain_name = Default {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.143165] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] placement.project_id = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.143332] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] placement.project_name = service {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.143494] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] placement.region_name = RegionOne {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.143652] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] placement.retriable_status_codes = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.143836] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] placement.service_name = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.144053] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] placement.service_type = placement {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.144182] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] placement.split_loggers = False {{(pid=62096) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.144344] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] placement.status_code_retries = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.144519] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] placement.status_code_retry_delay = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.144700] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] placement.system_scope = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.144863] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] placement.timeout = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.145018] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] placement.trust_id = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.145171] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] placement.user_domain_id = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.145335] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] placement.user_domain_name = Default {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.145490] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] placement.user_id = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.145656] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] placement.username = nova {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.145834] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.145991] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] placement.version = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.146163] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] quota.cores = 20 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.146324] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] quota.count_usage_from_placement = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.146491] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} 
[ 480.146657] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] quota.injected_file_content_bytes = 10240 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.146819] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] quota.injected_file_path_length = 255 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.146981] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] quota.injected_files = 5 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.147143] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] quota.instances = 10 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.147305] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] quota.key_pairs = 100 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.147468] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] quota.metadata_items = 128 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.147629] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] quota.ram = 51200 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.147787] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] quota.recheck_quota = True {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.147986] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] quota.server_group_members = 10 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.148202] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] quota.server_groups = 10 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.148349] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.148512] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.148691] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] scheduler.image_metadata_prefilter = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.148853] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.149050] env[62096]: DEBUG oslo_service.service 
[None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] scheduler.max_attempts = 3 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.149216] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] scheduler.max_placement_results = 1000 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.149378] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.149535] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] scheduler.query_placement_for_image_type_support = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.149691] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.149861] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] scheduler.workers = 2 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.150031] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.150198] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.150372] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.150536] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.150695] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.150853] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.151012] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.151194] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.151358] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] filter_scheduler.host_subset_size = 1 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.151518] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.151674] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.151868] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.152049] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] filter_scheduler.isolated_hosts = [] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.152216] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] filter_scheduler.isolated_images = [] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.152378] env[62096]: DEBUG oslo_service.service [None 
req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.152534] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.152695] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.152853] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] filter_scheduler.pci_in_placement = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.153012] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.153168] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.153329] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.153480] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.153634] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.153800] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.153973] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] filter_scheduler.track_instance_changes = True {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.154148] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.154315] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] metrics.required = True {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.154474] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] metrics.weight_multiplier = 1.0 
{{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.154632] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.154794] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] metrics.weight_setting = [] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.155109] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.155281] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] serial_console.enabled = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.155455] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] serial_console.port_range = 10000:20000 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.155639] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.155820] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.156034] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] serial_console.serialproxy_port = 6083 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.156185] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] service_user.auth_section = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.156358] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] service_user.auth_type = password {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.156546] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] service_user.cafile = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.156706] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] service_user.certfile = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.156865] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] service_user.collect_timing = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.157023] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] service_user.insecure = False {{(pid=62096) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.157176] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] service_user.keyfile = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.157343] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] service_user.send_service_user_token = True {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.157503] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] service_user.split_loggers = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.157657] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] service_user.timeout = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.157824] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] spice.agent_enabled = True {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.157982] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] spice.enabled = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.158295] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.158484] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.158661] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] spice.html5proxy_port = 6082 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.158820] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] spice.image_compression = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.158977] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] spice.jpeg_compression = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.159140] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] spice.playback_compression = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.159305] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] spice.server_listen = 127.0.0.1 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.159469] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=62096) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.159623] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] spice.streaming_mode = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.159778] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] spice.zlib_compression = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.159939] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] upgrade_levels.baseapi = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.160120] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] upgrade_levels.compute = auto {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.160287] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] upgrade_levels.conductor = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.160443] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] upgrade_levels.scheduler = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.160604] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vendordata_dynamic_auth.auth_section = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.160761] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vendordata_dynamic_auth.auth_type = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.160916] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vendordata_dynamic_auth.cafile = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.161072] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vendordata_dynamic_auth.certfile = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.161231] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.161387] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vendordata_dynamic_auth.insecure = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.161540] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vendordata_dynamic_auth.keyfile = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.161708] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=62096) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.161880] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vendordata_dynamic_auth.timeout = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.162055] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vmware.api_retry_count = 10 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.162213] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vmware.ca_file = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.162380] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vmware.cache_prefix = devstack-image-cache {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.162542] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vmware.cluster_name = testcl1 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.162702] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vmware.connection_pool_size = 10 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.162858] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vmware.console_delay_seconds = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.163021] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vmware.datastore_regex = ^datastore.* {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.163224] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.163392] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vmware.host_password = **** {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.163553] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vmware.host_port = 443 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.163720] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vmware.host_username = administrator@vsphere.local {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.163899] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vmware.insecure = True {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.164077] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vmware.integration_bridge = None {{(pid=62096) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.164245] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vmware.maximum_objects = 100 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.164402] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vmware.pbm_default_policy = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.164561] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vmware.pbm_enabled = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.164715] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vmware.pbm_wsdl_location = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.164883] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.165039] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vmware.serial_port_proxy_uri = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.165192] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vmware.serial_port_service_uri = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.165354] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vmware.task_poll_interval = 0.5 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.165520] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vmware.use_linked_clone = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.165684] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vmware.vnc_keymap = en-us {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.165843] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vmware.vnc_port = 5900 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.166006] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vmware.vnc_port_total = 10000 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.166187] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vnc.auth_schemes = ['none'] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.166360] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vnc.enabled = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.166651] env[62096]: 
DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.166834] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.167000] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vnc.novncproxy_port = 6080 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.167176] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vnc.server_listen = 127.0.0.1 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.167346] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.167504] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vnc.vencrypt_ca_certs = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.167658] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vnc.vencrypt_client_cert = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.167810] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vnc.vencrypt_client_key = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.167982] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.168156] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.168315] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.168474] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.168630] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] workarounds.disable_rootwrap = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.168788] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] workarounds.enable_numa_live_migration = False {{(pid=62096) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.168943] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.169102] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.169258] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.169412] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] workarounds.libvirt_disable_apic = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.169568] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.169725] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.169883] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.170040] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.170195] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.170349] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.170503] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.170658] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.170811] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
480.170974] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.171152] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.171316] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] wsgi.client_socket_timeout = 900 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.171477] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] wsgi.default_pool_size = 1000 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.171638] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] wsgi.keep_alive = True {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.171827] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] wsgi.max_header_line = 16384 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.171997] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] wsgi.secure_proxy_ssl_header = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.172171] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] wsgi.ssl_ca_file = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.172328] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] wsgi.ssl_cert_file = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.172484] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] wsgi.ssl_key_file = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.172644] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] wsgi.tcp_keepidle = 600 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.172812] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.172977] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] zvm.ca_file = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.173133] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] zvm.cloud_connector_url = None {{(pid=62096) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.173416] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.173587] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] zvm.reachable_timeout = 300 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.173766] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_policy.enforce_new_defaults = True {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.173962] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_policy.enforce_scope = True {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.174143] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_policy.policy_default_rule = default {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.174323] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.174492] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_policy.policy_file = policy.yaml {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.174657] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.174813] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.174993] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.175157] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.175316] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.175480] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.175652] env[62096]: DEBUG oslo_service.service [None 
req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.175826] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] profiler.connection_string = messaging:// {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.175989] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] profiler.enabled = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.176169] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] profiler.es_doc_type = notification {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.176333] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] profiler.es_scroll_size = 10000 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.176496] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] profiler.es_scroll_time = 2m {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.176653] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] profiler.filter_error_trace = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.176817] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] profiler.hmac_keys = **** {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.176982] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] profiler.sentinel_service_name = mymaster {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.177142] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] profiler.socket_timeout = 0.1 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.177300] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] profiler.trace_requests = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.177457] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] profiler.trace_sqlalchemy = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.177625] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] profiler_jaeger.process_tags = {} {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.177783] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] profiler_jaeger.service_name_prefix = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.178134] env[62096]: DEBUG oslo_service.service [None 
req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] profiler_otlp.service_name_prefix = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.178134] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] remote_debug.host = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.178284] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] remote_debug.port = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.178457] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.178618] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.178780] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.178940] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.179102] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.179259] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.179417] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.179575] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.179735] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.179903] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.180072] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=62096) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.180241] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.180406] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.180570] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.180735] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.180897] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.181061] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.181229] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.181390] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.181549] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.181725] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.181906] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.182071] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.182235] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=62096) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.182395] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.182551] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.182713] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.182872] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.183039] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.183201] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_messaging_rabbit.ssl = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.183368] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.183531] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.183688] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.183891] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.184078] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_messaging_rabbit.ssl_version = {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.184244] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.184431] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.184595] env[62096]: DEBUG oslo_service.service [None 
req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_messaging_notifications.retry = -1 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.184778] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.184945] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_messaging_notifications.transport_url = **** {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.185116] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_limit.auth_section = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.185276] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_limit.auth_type = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.185432] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_limit.cafile = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.185584] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_limit.certfile = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.185740] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_limit.collect_timing = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.185892] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_limit.connect_retries = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.186045] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_limit.connect_retry_delay = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.186197] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_limit.endpoint_id = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.186348] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_limit.endpoint_override = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.186502] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_limit.insecure = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.186652] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_limit.keyfile = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.186802] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None 
None] oslo_limit.max_version = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.186953] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_limit.min_version = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.187103] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_limit.region_name = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.187258] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_limit.retriable_status_codes = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.187411] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_limit.service_name = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.187561] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_limit.service_type = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.187717] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_limit.split_loggers = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.187871] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_limit.status_code_retries = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.188063] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_limit.status_code_retry_delay = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.188275] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_limit.timeout = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.188441] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_limit.valid_interfaces = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.188597] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_limit.version = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.188761] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_reports.file_event_handler = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.188923] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.189081] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] oslo_reports.log_dir = None {{(pid=62096) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.189247] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.189402] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.189699] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.189699] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.189865] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.190021] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.190184] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.190339] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vif_plug_ovs_privileged.group = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.190491] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.190649] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.190808] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.190963] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] vif_plug_ovs_privileged.user = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.191127] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] os_vif_linux_bridge.flat_interface = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.191301] 
env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.191468] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.191635] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.191828] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.192011] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.192183] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.192342] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.192515] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.192680] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] os_vif_ovs.isolate_vif = False {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.192849] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.193013] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.193175] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.193339] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] os_vif_ovs.ovsdb_interface = native {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.193496] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] os_vif_ovs.per_port_bridge = False {{(pid=62096) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.193656] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] os_brick.lock_path = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.193841] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] privsep_osbrick.capabilities = [21] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.194012] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] privsep_osbrick.group = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.194168] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] privsep_osbrick.helper_command = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.194329] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.194491] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.194643] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] privsep_osbrick.user = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.194813] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.194970] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] nova_sys_admin.group = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.195123] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] nova_sys_admin.helper_command = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.195439] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.195439] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.195576] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] nova_sys_admin.user = None {{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 480.195736] env[62096]: DEBUG oslo_service.service [None req-18baacba-f3f0-4a05-b4a0-15e2bd04f0e1 None None] ******************************************************************************** 
{{(pid=62096) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2624}} [ 480.196150] env[62096]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 480.197046] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b21a03c8-b5b4-4bfa-a4e9-6a6eaeea1021 None None] Expecting reply to msg 5d1e8828073f486e996db804fd253a7d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 480.204561] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5d1e8828073f486e996db804fd253a7d [ 480.699435] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-b21a03c8-b5b4-4bfa-a4e9-6a6eaeea1021 None None] Getting list of instances from cluster (obj){ [ 480.699435] env[62096]: value = "domain-c8" [ 480.699435] env[62096]: _type = "ClusterComputeResource" [ 480.699435] env[62096]: } {{(pid=62096) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 480.700527] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-392f59a6-a5b6-4cc0-a23f-dbdfcda28b0d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 480.709537] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-b21a03c8-b5b4-4bfa-a4e9-6a6eaeea1021 None None] Got total of 0 instances {{(pid=62096) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 480.710147] env[62096]: WARNING nova.virt.vmwareapi.driver [None req-b21a03c8-b5b4-4bfa-a4e9-6a6eaeea1021 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 480.710636] env[62096]: INFO nova.virt.node [None req-b21a03c8-b5b4-4bfa-a4e9-6a6eaeea1021 None None] Generated node identity 6eefe13c-ab55-4c03-987f-47a62756c3b3 [ 480.710878] env[62096]: INFO nova.virt.node [None req-b21a03c8-b5b4-4bfa-a4e9-6a6eaeea1021 None None] Wrote node identity 6eefe13c-ab55-4c03-987f-47a62756c3b3 to /opt/stack/data/n-cpu-1/compute_id [ 480.711304] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b21a03c8-b5b4-4bfa-a4e9-6a6eaeea1021 None None] Expecting reply to msg 3a85c240934242fd8e74374c10a5f0b7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 480.723262] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3a85c240934242fd8e74374c10a5f0b7 [ 481.213371] env[62096]: WARNING nova.compute.manager [None req-b21a03c8-b5b4-4bfa-a4e9-6a6eaeea1021 None None] Compute nodes ['6eefe13c-ab55-4c03-987f-47a62756c3b3'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. 
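The two nova.virt.node entries above show the first-start path for the host identity: no compute_id file exists yet, so a UUID is generated once, written to /opt/stack/data/n-cpu-1/compute_id, and reused on every later start of this service; hence the warning just above, where the identity is already on disk but no compute node row exists in the database yet. A minimal sketch of that generate-then-persist pattern (the helper name and layout are illustrative assumptions, not Nova's actual nova.virt.node code):

import os
import uuid

def get_or_create_node_identity(state_dir: str) -> str:
    """Return a stable per-node UUID, creating and persisting it on first start."""
    path = os.path.join(state_dir, "compute_id")
    if os.path.exists(path):
        with open(path) as f:
            return f.read().strip()
    node_uuid = str(uuid.uuid4())      # e.g. 6eefe13c-ab55-4c03-987f-47a62756c3b3
    with open(path, "w") as f:
        f.write(node_uuid + "\n")
    return node_uuid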
[ 481.214151] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b21a03c8-b5b4-4bfa-a4e9-6a6eaeea1021 None None] Expecting reply to msg 99f85d20289d4e3ab8bf6f171a6bbcee in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 481.239123] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 99f85d20289d4e3ab8bf6f171a6bbcee [ 481.716901] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b21a03c8-b5b4-4bfa-a4e9-6a6eaeea1021 None None] Expecting reply to msg 6128f162790743ca8dcfda9cd911bf2d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 481.729675] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6128f162790743ca8dcfda9cd911bf2d [ 482.220621] env[62096]: INFO nova.compute.manager [None req-b21a03c8-b5b4-4bfa-a4e9-6a6eaeea1021 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 482.221249] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b21a03c8-b5b4-4bfa-a4e9-6a6eaeea1021 None None] Expecting reply to msg bd962712455b42e794c3e0240bf3bd5f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 482.231964] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bd962712455b42e794c3e0240bf3bd5f [ 482.724033] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b21a03c8-b5b4-4bfa-a4e9-6a6eaeea1021 None None] Expecting reply to msg 294b6357c7f745529e0f39f8af9b998c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 482.735896] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 294b6357c7f745529e0f39f8af9b998c [ 483.226155] env[62096]: WARNING nova.compute.manager [None req-b21a03c8-b5b4-4bfa-a4e9-6a6eaeea1021 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. 
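The Acquiring/acquired/released DEBUG triplets that follow for "compute_resources" all come from oslo.concurrency's lock wrapper (the inner .../lockutils.py:402/407/421 frames in the entries). A minimal, self-contained example of the pattern that produces them; the decorated function body here is a placeholder, not the resource tracker's real code:

import logging

from oslo_concurrency import lockutils

logging.basicConfig(level=logging.DEBUG)

@lockutils.synchronized("compute_resources")
def update_available_resource():
    # Everything inside runs under the "compute_resources" semaphore, so the
    # periodic audit and per-instance claims never interleave.
    pass

update_available_resource()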
[ 483.226406] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b21a03c8-b5b4-4bfa-a4e9-6a6eaeea1021 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 483.226667] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b21a03c8-b5b4-4bfa-a4e9-6a6eaeea1021 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 483.226806] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b21a03c8-b5b4-4bfa-a4e9-6a6eaeea1021 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 483.226951] env[62096]: DEBUG nova.compute.resource_tracker [None req-b21a03c8-b5b4-4bfa-a4e9-6a6eaeea1021 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62096) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 483.227844] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ca899d5-3e3a-4300-a455-acee73810880 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 483.235957] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40a5e667-c9cc-4cdb-a06a-e95880a8cfb7 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 483.249795] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fb7af38-1d0c-4c9e-90ba-6235c6ba5064 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 483.255805] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7314c6ac-e94e-4e0c-bd22-676d032caa21 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 483.283096] env[62096]: DEBUG nova.compute.resource_tracker [None req-b21a03c8-b5b4-4bfa-a4e9-6a6eaeea1021 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181772MB free_disk=127GB free_vcpus=48 pci_devices=None {{(pid=62096) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 483.283252] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b21a03c8-b5b4-4bfa-a4e9-6a6eaeea1021 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 483.283423] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b21a03c8-b5b4-4bfa-a4e9-6a6eaeea1021 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 483.283757] env[62096]: INFO 
oslo_messaging._drivers.amqpdriver [None req-b21a03c8-b5b4-4bfa-a4e9-6a6eaeea1021 None None] Expecting reply to msg 5e71925e8d0d4151b8a2f235b18b771e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 483.294933] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5e71925e8d0d4151b8a2f235b18b771e [ 483.786123] env[62096]: WARNING nova.compute.resource_tracker [None req-b21a03c8-b5b4-4bfa-a4e9-6a6eaeea1021 None None] No compute node record for cpu-1:6eefe13c-ab55-4c03-987f-47a62756c3b3: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 6eefe13c-ab55-4c03-987f-47a62756c3b3 could not be found. [ 483.787353] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b21a03c8-b5b4-4bfa-a4e9-6a6eaeea1021 None None] Expecting reply to msg 2ef921d4baea4cef908a6585c30cafa2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 483.798306] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2ef921d4baea4cef908a6585c30cafa2 [ 484.289963] env[62096]: INFO nova.compute.resource_tracker [None req-b21a03c8-b5b4-4bfa-a4e9-6a6eaeea1021 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 6eefe13c-ab55-4c03-987f-47a62756c3b3 [ 484.290417] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b21a03c8-b5b4-4bfa-a4e9-6a6eaeea1021 None None] Expecting reply to msg e025a71c7a6c4e3aa3280b23955cec0d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 484.301420] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e025a71c7a6c4e3aa3280b23955cec0d [ 484.793065] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b21a03c8-b5b4-4bfa-a4e9-6a6eaeea1021 None None] Expecting reply to msg b7b79b7d494c477a94e851d3a4fac8df in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 484.812868] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b7b79b7d494c477a94e851d3a4fac8df [ 485.295260] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b21a03c8-b5b4-4bfa-a4e9-6a6eaeea1021 None None] Expecting reply to msg b204afb566114a34b781caf0f481792d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 485.316811] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b204afb566114a34b781caf0f481792d [ 485.797967] env[62096]: DEBUG nova.compute.resource_tracker [None req-b21a03c8-b5b4-4bfa-a4e9-6a6eaeea1021 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62096) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 485.798771] env[62096]: DEBUG nova.compute.resource_tracker [None req-b21a03c8-b5b4-4bfa-a4e9-6a6eaeea1021 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62096) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 485.957688] env[62096]: INFO nova.scheduler.client.report [None req-b21a03c8-b5b4-4bfa-a4e9-6a6eaeea1021 None None] [req-efdf4356-1aed-4d05-9210-0c91224de96c] Created resource provider record via placement API for resource provider with UUID 6eefe13c-ab55-4c03-987f-47a62756c3b3 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
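The inventory pushed to this new provider in the next entries follows the standard placement capacity rule, capacity = (total - reserved) * allocation_ratio, while max_unit caps what a single allocation may request. With the values logged below that works out to 192 schedulable VCPUs, 196078 MB of RAM and 400 GB of disk. A short sketch of that arithmetic; the dict literal simply copies the logged inventory:

# Effective capacity per resource class for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}
for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: {capacity:g}")   # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400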
[ 485.974124] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d6c5fa5-d5b3-4fb7-a3d8-5817389b0604 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 485.982070] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dab92ccd-90d2-4266-a431-8393ff8eedca {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 486.010895] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d222b552-7e5e-4f35-a8a8-a8a3b6c98d5b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 486.018114] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-765d6d76-1d50-46fa-820d-99fbf48ec91c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 486.030641] env[62096]: DEBUG nova.compute.provider_tree [None req-b21a03c8-b5b4-4bfa-a4e9-6a6eaeea1021 None None] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 486.031222] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b21a03c8-b5b4-4bfa-a4e9-6a6eaeea1021 None None] Expecting reply to msg 973f17063a5b461887014215d71f2eac in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 486.039157] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 973f17063a5b461887014215d71f2eac [ 486.565221] env[62096]: DEBUG nova.scheduler.client.report [None req-b21a03c8-b5b4-4bfa-a4e9-6a6eaeea1021 None None] Updated inventory for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:957}} [ 486.565457] env[62096]: DEBUG nova.compute.provider_tree [None req-b21a03c8-b5b4-4bfa-a4e9-6a6eaeea1021 None None] Updating resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 generation from 0 to 1 during operation: update_inventory {{(pid=62096) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 486.565594] env[62096]: DEBUG nova.compute.provider_tree [None req-b21a03c8-b5b4-4bfa-a4e9-6a6eaeea1021 None None] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 
1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 486.615862] env[62096]: DEBUG nova.compute.provider_tree [None req-b21a03c8-b5b4-4bfa-a4e9-6a6eaeea1021 None None] Updating resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 generation from 1 to 2 during operation: update_traits {{(pid=62096) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 486.618199] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b21a03c8-b5b4-4bfa-a4e9-6a6eaeea1021 None None] Expecting reply to msg 85501053b7ec4b2791fb88e46099562e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 486.632908] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 85501053b7ec4b2791fb88e46099562e [ 487.120436] env[62096]: DEBUG nova.compute.resource_tracker [None req-b21a03c8-b5b4-4bfa-a4e9-6a6eaeea1021 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62096) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 487.120740] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b21a03c8-b5b4-4bfa-a4e9-6a6eaeea1021 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.837s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 487.120854] env[62096]: DEBUG nova.service [None req-b21a03c8-b5b4-4bfa-a4e9-6a6eaeea1021 None None] Creating RPC server for service compute {{(pid=62096) start /opt/stack/nova/nova/service.py:186}} [ 487.144135] env[62096]: INFO oslo.messaging._drivers.impl_rabbit [None req-b21a03c8-b5b4-4bfa-a4e9-6a6eaeea1021 None None] Creating fanout queue: compute_fanout_80bf2cdf04ba4bc793b2d37aa5a9bc4f [ 487.147276] env[62096]: DEBUG nova.service [None req-b21a03c8-b5b4-4bfa-a4e9-6a6eaeea1021 None None] Join ServiceGroup membership for this service compute {{(pid=62096) start /opt/stack/nova/nova/service.py:203}} [ 487.147466] env[62096]: DEBUG nova.servicegroup.drivers.db [None req-b21a03c8-b5b4-4bfa-a4e9-6a6eaeea1021 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=62096) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 492.149805] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 1777e48735574686a191eff1c78889da in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 492.161503] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1777e48735574686a191eff1c78889da [ 521.779807] env[62096]: DEBUG oslo_concurrency.lockutils [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Acquiring lock "2e0f116a-e8d2-45b5-bb03-624a822fb4d2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 521.780122] env[62096]: DEBUG oslo_concurrency.lockutils [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Lock "2e0f116a-e8d2-45b5-bb03-624a822fb4d2" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 521.781219] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Expecting reply to msg f5a6c689e04549a3aafaed12138198bd in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 521.802909] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f5a6c689e04549a3aafaed12138198bd [ 522.283959] env[62096]: DEBUG nova.compute.manager [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 522.285894] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Expecting reply to msg 98f00e886c5b495c8a8b5d86d3f31e3d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 522.361181] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 98f00e886c5b495c8a8b5d86d3f31e3d [ 522.818268] env[62096]: DEBUG oslo_concurrency.lockutils [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 522.818616] env[62096]: DEBUG oslo_concurrency.lockutils [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 522.820398] env[62096]: INFO nova.compute.claims [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 522.823826] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Expecting reply to msg 0c38ae0775944ba682987f7d738573df in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 522.912925] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c38ae0775944ba682987f7d738573df [ 523.328495] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Expecting reply to msg 90b64caeaaf042f49a7be77a006b1008 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 523.338953] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 90b64caeaaf042f49a7be77a006b1008 
[ 523.875470] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dd98f73-cbcb-4720-8a5a-e654c5877fd4 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.885982] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32211a93-08c9-4239-9452-f5ff9dbf3923 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.918460] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86072173-558f-4543-ae8e-b4be416e747e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.929745] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfb8d02c-3d86-4d97-890e-b3d9f8f2807c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.944552] env[62096]: DEBUG nova.compute.provider_tree [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 523.945715] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Expecting reply to msg 31c36269db6b487baf0377f72061a8c3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 523.959772] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 31c36269db6b487baf0377f72061a8c3 [ 524.457179] env[62096]: DEBUG nova.scheduler.client.report [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 524.457179] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Expecting reply to msg ad07b834396e4b9c914eef2a30227265 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 524.470036] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ad07b834396e4b9c914eef2a30227265 [ 524.960595] env[62096]: DEBUG oslo_concurrency.lockutils [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.140s {{(pid=62096) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 524.960595] env[62096]: DEBUG nova.compute.manager [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] Start building networks asynchronously for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 524.961155] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Expecting reply to msg 788494ffd10c4ed288a6e425036ec878 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 525.028295] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 788494ffd10c4ed288a6e425036ec878 [ 525.366181] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Acquiring lock "018ae11e-bc51-4e0b-8f2e-c464fca6f375" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 525.366286] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Lock "018ae11e-bc51-4e0b-8f2e-c464fca6f375" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 525.366692] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Expecting reply to msg 36477ee3013f456da1ab72acbd8d13af in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 525.379109] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 36477ee3013f456da1ab72acbd8d13af [ 525.465269] env[62096]: DEBUG nova.compute.utils [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 525.465997] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Expecting reply to msg e95d4ef7a6074c898e722ed75a4556d7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 525.466944] env[62096]: DEBUG nova.compute.manager [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 525.467184] env[62096]: DEBUG nova.network.neutron [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 525.480844] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e95d4ef7a6074c898e722ed75a4556d7 [ 525.869486] env[62096]: DEBUG nova.compute.manager [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 525.871532] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Expecting reply to msg b585a8b33a5e436891b2ef7a786a5d33 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 525.934543] env[62096]: DEBUG nova.policy [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b33039e637174a218c8f25d1974817da', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '22249d2bdd554c26a27cd4c7ea5c5a45', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 525.937659] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b585a8b33a5e436891b2ef7a786a5d33 [ 525.975027] env[62096]: DEBUG nova.compute.manager [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 525.976211] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Expecting reply to msg b9863987c6ff4e2086863a48d64a7a3b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 526.021271] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b9863987c6ff4e2086863a48d64a7a3b [ 526.149026] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._sync_power_states {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 526.149662] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 225250c96e4640cf93443cc398dde1f4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 526.165077] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 225250c96e4640cf93443cc398dde1f4 [ 526.389623] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 526.389816] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 526.391351] env[62096]: INFO nova.compute.claims [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 526.393087] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Expecting reply to msg 20ecbd4d23de454b8a0a9111e1af29ca in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 526.484154] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Expecting reply to msg eff7a3b847a04c19983ac70c6ca9ee5e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 526.492259] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 20ecbd4d23de454b8a0a9111e1af29ca [ 526.538996] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eff7a3b847a04c19983ac70c6ca9ee5e [ 526.539564] env[62096]: DEBUG oslo_concurrency.lockutils [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Acquiring lock "84c97672-0027-43af-800d-a11c243e8825" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 526.539776] env[62096]: DEBUG oslo_concurrency.lockutils [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Lock "84c97672-0027-43af-800d-a11c243e8825" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 526.540294] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Expecting reply to msg 0f4eee2ff85144c19141f5ea829a86e8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 526.552689] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0f4eee2ff85144c19141f5ea829a86e8 [ 526.654595] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Getting list of instances from cluster (obj){ [ 526.654595] env[62096]: value = "domain-c8" [ 526.654595] env[62096]: _type = "ClusterComputeResource" [ 526.654595] env[62096]: } {{(pid=62096) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 526.655760] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f8bef85-8ace-49b3-89ee-312577896aad {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.666465] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Got total of 0 instances {{(pid=62096) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 526.666465] env[62096]: WARNING nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] While synchronizing instance power states, found 1 instances in the database and 0 instances on the hypervisor. 
[ 526.666465] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Triggering sync for uuid 2e0f116a-e8d2-45b5-bb03-624a822fb4d2 {{(pid=62096) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10336}} [ 526.666465] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Acquiring lock "2e0f116a-e8d2-45b5-bb03-624a822fb4d2" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 526.666465] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 526.666465] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Getting list of instances from cluster (obj){ [ 526.666465] env[62096]: value = "domain-c8" [ 526.666465] env[62096]: _type = "ClusterComputeResource" [ 526.666465] env[62096]: } {{(pid=62096) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 526.666777] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ac98f6e-7879-4d03-bbff-4f3596e85976 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.674928] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Got total of 0 instances {{(pid=62096) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 526.806986] env[62096]: DEBUG nova.network.neutron [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] Successfully created port: 444cedaf-2406-4024-8e04-f6253a64d8ea {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 526.896463] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Expecting reply to msg f8541e40ebed46a88f6d2709be2eed61 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 526.905685] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f8541e40ebed46a88f6d2709be2eed61 [ 526.988740] env[62096]: DEBUG nova.compute.manager [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 527.012461] env[62096]: DEBUG nova.virt.hardware [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 527.012708] env[62096]: DEBUG nova.virt.hardware [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 527.012867] env[62096]: DEBUG nova.virt.hardware [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 527.013051] env[62096]: DEBUG nova.virt.hardware [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 527.013190] env[62096]: DEBUG nova.virt.hardware [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 527.013336] env[62096]: DEBUG nova.virt.hardware [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 527.013542] env[62096]: DEBUG nova.virt.hardware [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 527.013696] env[62096]: DEBUG nova.virt.hardware [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 527.014110] env[62096]: DEBUG 
nova.virt.hardware [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 527.014288] env[62096]: DEBUG nova.virt.hardware [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 527.014494] env[62096]: DEBUG nova.virt.hardware [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 527.015429] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ee81ce4-1288-4ebe-a9a4-91fa2d811004 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.023553] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f671444a-dbbf-44e7-bf48-28547f426892 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.039720] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b76cc8fd-33d9-4191-8da5-d47ef2d5763f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.050934] env[62096]: DEBUG nova.compute.manager [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] [instance: 84c97672-0027-43af-800d-a11c243e8825] Starting instance... 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 527.052558] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Expecting reply to msg dcfcc8e7ef244f05a59b3bd36d7906e8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 527.099467] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dcfcc8e7ef244f05a59b3bd36d7906e8 [ 527.483699] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eec6248-14d2-482b-9a89-795eb8834fe2 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.492723] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16a0c73e-42d1-4439-9adf-d8607378e25e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.531965] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33d2a94f-4823-4962-a861-bcefe6183706 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.542173] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3afb5c26-0ced-4272-93c9-1beb9c421936 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.561550] env[62096]: DEBUG nova.compute.provider_tree [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 527.562268] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Expecting reply to msg 1351b92064d64dada1495ba898309dc6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 527.583652] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1351b92064d64dada1495ba898309dc6 [ 527.584902] env[62096]: DEBUG oslo_concurrency.lockutils [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 528.078167] env[62096]: DEBUG nova.scheduler.client.report [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 528.080497] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Expecting reply to msg 92c489325a514065b6b2007f5ad7159c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 528.113983] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 92c489325a514065b6b2007f5ad7159c [ 528.584043] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.194s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 528.584629] env[62096]: DEBUG nova.compute.manager [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] Start building networks asynchronously for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 528.586556] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Expecting reply to msg a4b61a10dfed457683af0ae3e0942093 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 528.587864] env[62096]: DEBUG oslo_concurrency.lockutils [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.003s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 528.595597] env[62096]: INFO nova.compute.claims [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] [instance: 84c97672-0027-43af-800d-a11c243e8825] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 528.595597] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Expecting reply to msg 00088a24f7894422b68c0f0b83a01c3b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 528.636368] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a4b61a10dfed457683af0ae3e0942093 [ 528.662099] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 00088a24f7894422b68c0f0b83a01c3b [ 528.969150] env[62096]: DEBUG oslo_concurrency.lockutils [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Acquiring lock "89eb1f17-1d36-4b6f-8102-04708d55a81c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 528.969630] env[62096]: DEBUG oslo_concurrency.lockutils [None 
req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Lock "89eb1f17-1d36-4b6f-8102-04708d55a81c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 528.970327] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Expecting reply to msg 098cc58c68ca4178a9b1a18721d1728e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 528.982816] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 098cc58c68ca4178a9b1a18721d1728e [ 529.094925] env[62096]: DEBUG nova.compute.utils [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 529.095720] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Expecting reply to msg 758f54417ef04f0a81672a3eece2d16c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 529.096679] env[62096]: DEBUG nova.compute.manager [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 529.096941] env[62096]: DEBUG nova.network.neutron [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 529.105797] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Expecting reply to msg 13fe9348b0f3468fbd20a052ef3263d7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 529.118582] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 758f54417ef04f0a81672a3eece2d16c [ 529.123220] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 13fe9348b0f3468fbd20a052ef3263d7 [ 529.314419] env[62096]: DEBUG oslo_concurrency.lockutils [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Acquiring lock "5ea9e99b-dffd-4b11-97ff-634551c00745" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 529.314963] env[62096]: DEBUG oslo_concurrency.lockutils [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Lock "5ea9e99b-dffd-4b11-97ff-634551c00745" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 529.315418] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Expecting reply to msg c49e39420aac44e3b5260fe7979e6411 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 529.316150] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Acquiring lock "ebc7dc27-110e-4edc-87be-cb0ecdbe215f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 529.316349] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Lock "ebc7dc27-110e-4edc-87be-cb0ecdbe215f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 529.316733] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Expecting reply to msg 
89a28b4bf0634fd28095101ddbd33530 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 529.327925] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 89a28b4bf0634fd28095101ddbd33530 [ 529.333418] env[62096]: DEBUG nova.policy [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c98a4b54cffb4e81822b45503bba51ce', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '72bd18d2316244598b2744de10936522', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 529.335217] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c49e39420aac44e3b5260fe7979e6411 [ 529.477656] env[62096]: DEBUG nova.compute.manager [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 529.479336] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Expecting reply to msg 3d08de60760d4a5480c2c412119e8eed in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 529.518804] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3d08de60760d4a5480c2c412119e8eed [ 529.592052] env[62096]: ERROR nova.compute.manager [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 444cedaf-2406-4024-8e04-f6253a64d8ea, please check neutron logs for more information. 
[ 529.592052] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 529.592052] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 529.592052] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 529.592052] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 529.592052] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 529.592052] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 529.592052] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 529.592052] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 529.592052] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 529.592052] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 529.592052] env[62096]: ERROR nova.compute.manager raise self.value [ 529.592052] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 529.592052] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 529.592052] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 529.592052] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 529.592579] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 529.592579] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 529.592579] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 444cedaf-2406-4024-8e04-f6253a64d8ea, please check neutron logs for more information. 
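Editor's note: the traceback above (and its repeat just below) bottoms out in `_ensure_no_port_binding_failure` in `nova/network/neutron.py` raising `nova.exception.PortBindingFailed` for port 444cedaf-2406-4024-8e04-f6253a64d8ea. The following is a minimal, self-contained sketch of that kind of guard, not the nova source itself; the `binding:vif_type == 'binding_failed'` check is an assumption about how neutron flags a failed binding and is not taken from these logs.

```python
# Illustrative sketch only -- mirrors the guard the traceback above points at:
# after neutron returns the updated port, the caller verifies the binding
# actually succeeded and raises PortBindingFailed otherwise.

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")
        self.port_id = port_id


def ensure_no_port_binding_failure(port: dict) -> None:
    """Raise if neutron reports the port binding as failed (assumed flag)."""
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


# Example: a port dict of the shape that would trigger the error seen above.
failed_port = {'id': '444cedaf-2406-4024-8e04-f6253a64d8ea',
               'binding:vif_type': 'binding_failed'}
# ensure_no_port_binding_failure(failed_port)  # -> raises PortBindingFailed
```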
[ 529.592579] env[62096]: ERROR nova.compute.manager [ 529.592579] env[62096]: Traceback (most recent call last): [ 529.592579] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 529.592579] env[62096]: listener.cb(fileno) [ 529.592579] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 529.592579] env[62096]: result = function(*args, **kwargs) [ 529.592579] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 529.592579] env[62096]: return func(*args, **kwargs) [ 529.592579] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 529.592579] env[62096]: raise e [ 529.592579] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 529.592579] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 529.592579] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 529.592579] env[62096]: created_port_ids = self._update_ports_for_instance( [ 529.592579] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 529.592579] env[62096]: with excutils.save_and_reraise_exception(): [ 529.592579] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 529.592579] env[62096]: self.force_reraise() [ 529.592579] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 529.592579] env[62096]: raise self.value [ 529.592579] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 529.592579] env[62096]: updated_port = self._update_port( [ 529.592579] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 529.592579] env[62096]: _ensure_no_port_binding_failure(port) [ 529.592579] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 529.592579] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 529.593422] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 444cedaf-2406-4024-8e04-f6253a64d8ea, please check neutron logs for more information. [ 529.593422] env[62096]: Removing descriptor: 14 [ 529.593778] env[62096]: ERROR nova.compute.manager [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 444cedaf-2406-4024-8e04-f6253a64d8ea, please check neutron logs for more information. 
[ 529.593778] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] Traceback (most recent call last): [ 529.593778] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 529.593778] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] yield resources [ 529.593778] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 529.593778] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] self.driver.spawn(context, instance, image_meta, [ 529.593778] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 529.593778] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 529.593778] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 529.593778] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] vm_ref = self.build_virtual_machine(instance, [ 529.593778] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 529.594135] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] vif_infos = vmwarevif.get_vif_info(self._session, [ 529.594135] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 529.594135] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] for vif in network_info: [ 529.594135] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 529.594135] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] return self._sync_wrapper(fn, *args, **kwargs) [ 529.594135] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 529.594135] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] self.wait() [ 529.594135] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 529.594135] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] self[:] = self._gt.wait() [ 529.594135] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 529.594135] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] return self._exit_event.wait() [ 529.594135] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 529.594135] env[62096]: ERROR 
nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] result = hub.switch() [ 529.594501] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 529.594501] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] return self.greenlet.switch() [ 529.594501] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 529.594501] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] result = function(*args, **kwargs) [ 529.594501] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 529.594501] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] return func(*args, **kwargs) [ 529.594501] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 529.594501] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] raise e [ 529.594501] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 529.594501] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] nwinfo = self.network_api.allocate_for_instance( [ 529.594501] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 529.594501] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] created_port_ids = self._update_ports_for_instance( [ 529.594501] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 529.594873] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] with excutils.save_and_reraise_exception(): [ 529.594873] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 529.594873] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] self.force_reraise() [ 529.594873] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 529.594873] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] raise self.value [ 529.594873] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 529.594873] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] updated_port = self._update_port( [ 529.594873] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 529.594873] 
env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] _ensure_no_port_binding_failure(port) [ 529.594873] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 529.594873] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] raise exception.PortBindingFailed(port_id=port['id']) [ 529.594873] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] nova.exception.PortBindingFailed: Binding failed for port 444cedaf-2406-4024-8e04-f6253a64d8ea, please check neutron logs for more information. [ 529.594873] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] [ 529.595244] env[62096]: INFO nova.compute.manager [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] Terminating instance [ 529.596297] env[62096]: DEBUG oslo_concurrency.lockutils [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Acquiring lock "refresh_cache-2e0f116a-e8d2-45b5-bb03-624a822fb4d2" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 529.596457] env[62096]: DEBUG oslo_concurrency.lockutils [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Acquired lock "refresh_cache-2e0f116a-e8d2-45b5-bb03-624a822fb4d2" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 529.596621] env[62096]: DEBUG nova.network.neutron [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 529.597185] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Expecting reply to msg ebca0e84e1b74b14a0e48d895e7e60ca in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 529.601222] env[62096]: DEBUG nova.compute.manager [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 529.602864] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Expecting reply to msg c0c96c447dc448d1bcb4cdf81c80296f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 529.607458] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ebca0e84e1b74b14a0e48d895e7e60ca [ 529.655365] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c0c96c447dc448d1bcb4cdf81c80296f [ 529.737308] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f464c8b4-057a-438b-bbb7-56a7f23f6bf9 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.746980] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e41a059-e751-4775-9131-9186d7d1780c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.797300] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb026cf9-5065-4aad-9e20-cf47287dec70 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.806232] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8a405a0-48df-4448-904a-083f87605d7e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.827099] env[62096]: DEBUG nova.compute.manager [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 529.829272] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Expecting reply to msg ea4df860d8d744b6970da5d5e97653a8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 529.830749] env[62096]: DEBUG nova.compute.manager [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] Starting instance... 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 529.833164] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Expecting reply to msg b33dc2f902154b34ab68ca6ddc619748 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 529.834862] env[62096]: DEBUG nova.compute.provider_tree [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 529.836307] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Expecting reply to msg 78c3660fcbf642bba86920b72d00d4b5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 529.865368] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 78c3660fcbf642bba86920b72d00d4b5 [ 529.906133] env[62096]: DEBUG nova.network.neutron [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] Successfully created port: 00d94847-7f83-4bc0-ba71-e7943d2957af {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 529.928694] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b33dc2f902154b34ab68ca6ddc619748 [ 529.939988] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ea4df860d8d744b6970da5d5e97653a8 [ 530.002541] env[62096]: DEBUG oslo_concurrency.lockutils [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 530.107794] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Expecting reply to msg a73975ca5e2742b988b267269d5c8936 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 530.158739] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a73975ca5e2742b988b267269d5c8936 [ 530.244753] env[62096]: DEBUG oslo_concurrency.lockutils [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Acquiring lock "8001fc1b-fa8c-496d-acc8-922cfd5d5cc2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 530.245013] env[62096]: DEBUG oslo_concurrency.lockutils [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Lock "8001fc1b-fa8c-496d-acc8-922cfd5d5cc2" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 530.245474] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Expecting reply to msg 7646d2bc94384ee0aaaca7e8fbc68e99 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 530.249784] env[62096]: DEBUG nova.network.neutron [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 530.257771] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7646d2bc94384ee0aaaca7e8fbc68e99 [ 530.354998] env[62096]: DEBUG oslo_concurrency.lockutils [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 530.356864] env[62096]: DEBUG nova.scheduler.client.report [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 530.359946] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Expecting reply to msg f437d5e6d9f14387804df6b4afd2593b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 530.366214] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 530.374611] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f437d5e6d9f14387804df6b4afd2593b [ 530.413527] env[62096]: DEBUG nova.network.neutron [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 530.414070] env[62096]: INFO 
oslo_messaging._drivers.amqpdriver [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Expecting reply to msg 4ccdf569aeda4b2c84186e9ba52d86e4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 530.423281] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4ccdf569aeda4b2c84186e9ba52d86e4 [ 530.611212] env[62096]: DEBUG nova.compute.manager [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] Start spawning the instance on the hypervisor. {{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 530.652622] env[62096]: DEBUG nova.virt.hardware [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 530.652857] env[62096]: DEBUG nova.virt.hardware [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 530.653009] env[62096]: DEBUG nova.virt.hardware [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 530.653181] env[62096]: DEBUG nova.virt.hardware [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 530.653320] env[62096]: DEBUG nova.virt.hardware [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 530.653461] env[62096]: DEBUG nova.virt.hardware [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 530.653659] env[62096]: DEBUG nova.virt.hardware [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 
tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 530.653841] env[62096]: DEBUG nova.virt.hardware [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 530.654032] env[62096]: DEBUG nova.virt.hardware [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 530.654191] env[62096]: DEBUG nova.virt.hardware [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 530.654376] env[62096]: DEBUG nova.virt.hardware [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 530.655245] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71928733-7a5e-4378-a81c-c6288005173a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.663820] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b602c575-55e8-4c0c-9d2b-865b63cc363b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.747518] env[62096]: DEBUG nova.compute.manager [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] Starting instance... 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 530.749347] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Expecting reply to msg babb01a795974715a47411098188344b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 530.798996] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg babb01a795974715a47411098188344b [ 530.866918] env[62096]: DEBUG oslo_concurrency.lockutils [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.279s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 530.867446] env[62096]: DEBUG nova.compute.manager [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] [instance: 84c97672-0027-43af-800d-a11c243e8825] Start building networks asynchronously for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 530.869548] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Expecting reply to msg b72d1f5d75c949fc83fd709e3f5bb030 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 530.870688] env[62096]: DEBUG oslo_concurrency.lockutils [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.868s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 530.881467] env[62096]: INFO nova.compute.claims [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 530.883349] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Expecting reply to msg b1a0d5767cd74e27bf27c58d1e3fe4ce in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 530.916653] env[62096]: DEBUG oslo_concurrency.lockutils [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Releasing lock "refresh_cache-2e0f116a-e8d2-45b5-bb03-624a822fb4d2" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 530.917070] env[62096]: DEBUG nova.compute.manager [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 530.917264] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 530.917568] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-350063b7-2ba6-4c6e-86b0-2b723d14f6e0 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.926517] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3a2c17e-1ca8-4d56-ad33-3e44c1f0f290 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.938325] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b1a0d5767cd74e27bf27c58d1e3fe4ce [ 530.943885] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b72d1f5d75c949fc83fd709e3f5bb030 [ 530.950790] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2e0f116a-e8d2-45b5-bb03-624a822fb4d2 could not be found. [ 530.951192] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 530.951733] env[62096]: INFO nova.compute.manager [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] Took 0.03 seconds to destroy the instance on the hypervisor. [ 530.952560] env[62096]: DEBUG oslo.service.loopingcall [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 530.953494] env[62096]: DEBUG nova.compute.manager [-] [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 530.953686] env[62096]: DEBUG nova.network.neutron [-] [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 531.034293] env[62096]: DEBUG nova.network.neutron [-] [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 531.034821] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg a0c97ea3d077428caedc669ebea9c3b7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 531.049743] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a0c97ea3d077428caedc669ebea9c3b7 [ 531.114303] env[62096]: DEBUG nova.compute.manager [req-24d0ad6e-039d-49a3-8f80-ba99a72d3856 req-a195684b-403f-42e0-b52d-a433f029e6c6 service nova] [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] Received event network-changed-444cedaf-2406-4024-8e04-f6253a64d8ea {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 531.114585] env[62096]: DEBUG nova.compute.manager [req-24d0ad6e-039d-49a3-8f80-ba99a72d3856 req-a195684b-403f-42e0-b52d-a433f029e6c6 service nova] [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] Refreshing instance network info cache due to event network-changed-444cedaf-2406-4024-8e04-f6253a64d8ea. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 531.114710] env[62096]: DEBUG oslo_concurrency.lockutils [req-24d0ad6e-039d-49a3-8f80-ba99a72d3856 req-a195684b-403f-42e0-b52d-a433f029e6c6 service nova] Acquiring lock "refresh_cache-2e0f116a-e8d2-45b5-bb03-624a822fb4d2" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 531.114874] env[62096]: DEBUG oslo_concurrency.lockutils [req-24d0ad6e-039d-49a3-8f80-ba99a72d3856 req-a195684b-403f-42e0-b52d-a433f029e6c6 service nova] Acquired lock "refresh_cache-2e0f116a-e8d2-45b5-bb03-624a822fb4d2" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 531.115043] env[62096]: DEBUG nova.network.neutron [req-24d0ad6e-039d-49a3-8f80-ba99a72d3856 req-a195684b-403f-42e0-b52d-a433f029e6c6 service nova] [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] Refreshing network info cache for port 444cedaf-2406-4024-8e04-f6253a64d8ea {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 531.115472] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-24d0ad6e-039d-49a3-8f80-ba99a72d3856 req-a195684b-403f-42e0-b52d-a433f029e6c6 service nova] Expecting reply to msg 6c2efc3f493d40ba83b93034b1d7361f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 531.124334] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6c2efc3f493d40ba83b93034b1d7361f [ 531.270293] env[62096]: DEBUG oslo_concurrency.lockutils [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 531.386475] env[62096]: DEBUG nova.compute.utils [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 531.387189] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Expecting reply to msg 
3b3f32049e434d53ae6d21197e84172f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 531.389046] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Expecting reply to msg a89b7e9607994b8dbdd2d8fbc7ef645d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 531.390291] env[62096]: DEBUG nova.compute.manager [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] [instance: 84c97672-0027-43af-800d-a11c243e8825] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 531.390452] env[62096]: DEBUG nova.network.neutron [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] [instance: 84c97672-0027-43af-800d-a11c243e8825] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 531.399356] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a89b7e9607994b8dbdd2d8fbc7ef645d [ 531.406319] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3b3f32049e434d53ae6d21197e84172f [ 531.468032] env[62096]: DEBUG nova.policy [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd70b0d38ed5f40e590c3bf252aecd00e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7c72fea45ffe46069bdbae1716fbd656', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 531.538965] env[62096]: DEBUG nova.network.neutron [-] [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 531.538965] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 0260697d03a24019b2902cd9208e49fa in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 531.553680] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0260697d03a24019b2902cd9208e49fa [ 531.677968] env[62096]: DEBUG nova.network.neutron [req-24d0ad6e-039d-49a3-8f80-ba99a72d3856 req-a195684b-403f-42e0-b52d-a433f029e6c6 service nova] [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 531.897397] env[62096]: DEBUG nova.compute.manager [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] [instance: 84c97672-0027-43af-800d-a11c243e8825] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 531.903473] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Expecting reply to msg 1975aaab9ccd4269bcbffa744980d78d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 531.947970] env[62096]: DEBUG nova.network.neutron [req-24d0ad6e-039d-49a3-8f80-ba99a72d3856 req-a195684b-403f-42e0-b52d-a433f029e6c6 service nova] [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 531.948504] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-24d0ad6e-039d-49a3-8f80-ba99a72d3856 req-a195684b-403f-42e0-b52d-a433f029e6c6 service nova] Expecting reply to msg d94cfa13da0c41ab8df15179d82894dd in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 531.956589] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1975aaab9ccd4269bcbffa744980d78d [ 531.957601] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d94cfa13da0c41ab8df15179d82894dd [ 532.003586] env[62096]: DEBUG nova.network.neutron [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] [instance: 84c97672-0027-43af-800d-a11c243e8825] Successfully created port: 61df2824-e44b-4f03-936c-841be8a9b052 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 532.041551] env[62096]: INFO nova.compute.manager [-] [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] Took 1.09 seconds to deallocate network for instance. 
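Editor's note: nova.scheduler.client.report logs the same provider inventory several times in this section (and again just below): VCPU total=48 with allocation_ratio=4.0, MEMORY_MB total=196590 with 512 reserved, DISK_GB total=400. The sketch below shows how such an inventory dict translates into schedulable capacity. The `(total - reserved) * allocation_ratio` rule is the usual placement capacity formula and is stated here as an assumption rather than something these logs spell out.

```python
# Illustrative sketch: capacity implied by the inventory data logged above.

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0,
                  'min_unit': 1, 'max_unit': 16,    'step_size': 1},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0,
                  'min_unit': 1, 'max_unit': 65530, 'step_size': 1},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0,
                  'min_unit': 1, 'max_unit': 127,   'step_size': 1},
}

def capacity(inv: dict) -> float:
    """Total amount placement can hand out for one resource class."""
    return (inv['total'] - inv['reserved']) * inv['allocation_ratio']

for rc, inv in inventory.items():
    print(f"{rc}: {capacity(inv):.0f} allocatable "
          f"(max {inv['max_unit']} per single allocation)")
# VCPU: 192 allocatable (max 16 per single allocation)
# MEMORY_MB: 196078 allocatable (max 65530 per single allocation)
# DISK_GB: 400 allocatable (max 127 per single allocation)
```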
[ 532.046628] env[62096]: DEBUG nova.compute.claims [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 532.046953] env[62096]: DEBUG oslo_concurrency.lockutils [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 532.078317] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30e37e04-f4e9-4d39-a26a-762930831963 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.086083] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ea2bf2a-dc8f-4790-9e8e-b03e3421ddab {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.117726] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6c9130d-f882-47d9-8f39-61cf0a46b339 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.125585] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e11bd40f-9713-41eb-91a6-b4883dccc036 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.142141] env[62096]: DEBUG nova.compute.provider_tree [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 532.142657] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Expecting reply to msg 14bb590f27d64aad9168ddc4b13b9c79 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 532.156503] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 14bb590f27d64aad9168ddc4b13b9c79 [ 532.411508] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Expecting reply to msg ac0d4331e00746688cf957cf3ad01240 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 532.455116] env[62096]: DEBUG oslo_concurrency.lockutils [req-24d0ad6e-039d-49a3-8f80-ba99a72d3856 req-a195684b-403f-42e0-b52d-a433f029e6c6 service nova] Releasing lock "refresh_cache-2e0f116a-e8d2-45b5-bb03-624a822fb4d2" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 532.473129] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac0d4331e00746688cf957cf3ad01240 [ 532.645346] env[62096]: DEBUG nova.scheduler.client.report [None 
req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 532.647613] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Expecting reply to msg 160d3ef10c1342ad8ecbfa0580e30bf9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 532.668638] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 160d3ef10c1342ad8ecbfa0580e30bf9 [ 532.914474] env[62096]: DEBUG nova.compute.manager [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] [instance: 84c97672-0027-43af-800d-a11c243e8825] Start spawning the instance on the hypervisor. {{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 532.940349] env[62096]: DEBUG nova.virt.hardware [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 532.940576] env[62096]: DEBUG nova.virt.hardware [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 532.941070] env[62096]: DEBUG nova.virt.hardware [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 532.941070] env[62096]: DEBUG nova.virt.hardware [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 532.941070] env[62096]: DEBUG nova.virt.hardware [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 532.941241] env[62096]: DEBUG nova.virt.hardware [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 532.941444] env[62096]: DEBUG nova.virt.hardware [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 532.941539] env[62096]: DEBUG nova.virt.hardware [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 532.941774] env[62096]: DEBUG nova.virt.hardware [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 532.941836] env[62096]: DEBUG nova.virt.hardware [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 532.941974] env[62096]: DEBUG nova.virt.hardware [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 532.942837] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98a822d0-e69f-4453-856c-5650b222dc67 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.951104] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65ee4990-9041-4431-89b3-59bfad1bd4b1 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.112925] env[62096]: ERROR nova.compute.manager [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 61df2824-e44b-4f03-936c-841be8a9b052, please check neutron logs for more information. 
[ 533.112925] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 533.112925] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 533.112925] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 533.112925] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 533.112925] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 533.112925] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 533.112925] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 533.112925] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 533.112925] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 533.112925] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 533.112925] env[62096]: ERROR nova.compute.manager raise self.value [ 533.112925] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 533.112925] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 533.112925] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 533.112925] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 533.113482] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 533.113482] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 533.113482] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 61df2824-e44b-4f03-936c-841be8a9b052, please check neutron logs for more information. 
[ 533.113482] env[62096]: ERROR nova.compute.manager [ 533.113482] env[62096]: Traceback (most recent call last): [ 533.113482] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 533.113482] env[62096]: listener.cb(fileno) [ 533.113482] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 533.113482] env[62096]: result = function(*args, **kwargs) [ 533.113482] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 533.113482] env[62096]: return func(*args, **kwargs) [ 533.113482] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 533.113482] env[62096]: raise e [ 533.113482] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 533.113482] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 533.113482] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 533.113482] env[62096]: created_port_ids = self._update_ports_for_instance( [ 533.113482] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 533.113482] env[62096]: with excutils.save_and_reraise_exception(): [ 533.113482] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 533.113482] env[62096]: self.force_reraise() [ 533.113482] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 533.113482] env[62096]: raise self.value [ 533.113482] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 533.113482] env[62096]: updated_port = self._update_port( [ 533.113482] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 533.113482] env[62096]: _ensure_no_port_binding_failure(port) [ 533.113482] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 533.113482] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 533.114246] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 61df2824-e44b-4f03-936c-841be8a9b052, please check neutron logs for more information. [ 533.114246] env[62096]: Removing descriptor: 14 [ 533.114246] env[62096]: ERROR nova.compute.manager [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] [instance: 84c97672-0027-43af-800d-a11c243e8825] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 61df2824-e44b-4f03-936c-841be8a9b052, please check neutron logs for more information. 
[ 533.114246] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] Traceback (most recent call last): [ 533.114246] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 533.114246] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] yield resources [ 533.114246] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 533.114246] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] self.driver.spawn(context, instance, image_meta, [ 533.114246] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 533.114246] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] self._vmops.spawn(context, instance, image_meta, injected_files, [ 533.114246] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 533.114246] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] vm_ref = self.build_virtual_machine(instance, [ 533.114621] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 533.114621] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] vif_infos = vmwarevif.get_vif_info(self._session, [ 533.114621] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 533.114621] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] for vif in network_info: [ 533.114621] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 533.114621] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] return self._sync_wrapper(fn, *args, **kwargs) [ 533.114621] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 533.114621] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] self.wait() [ 533.114621] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 533.114621] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] self[:] = self._gt.wait() [ 533.114621] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 533.114621] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] return self._exit_event.wait() [ 533.114621] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 533.114970] env[62096]: ERROR 
nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] result = hub.switch() [ 533.114970] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 533.114970] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] return self.greenlet.switch() [ 533.114970] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 533.114970] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] result = function(*args, **kwargs) [ 533.114970] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 533.114970] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] return func(*args, **kwargs) [ 533.114970] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 533.114970] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] raise e [ 533.114970] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 533.114970] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] nwinfo = self.network_api.allocate_for_instance( [ 533.114970] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 533.114970] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] created_port_ids = self._update_ports_for_instance( [ 533.115341] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 533.115341] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] with excutils.save_and_reraise_exception(): [ 533.115341] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 533.115341] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] self.force_reraise() [ 533.115341] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 533.115341] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] raise self.value [ 533.115341] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 533.115341] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] updated_port = self._update_port( [ 533.115341] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 533.115341] 
env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] _ensure_no_port_binding_failure(port) [ 533.115341] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 533.115341] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] raise exception.PortBindingFailed(port_id=port['id']) [ 533.115756] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] nova.exception.PortBindingFailed: Binding failed for port 61df2824-e44b-4f03-936c-841be8a9b052, please check neutron logs for more information. [ 533.115756] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] [ 533.115756] env[62096]: INFO nova.compute.manager [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] [instance: 84c97672-0027-43af-800d-a11c243e8825] Terminating instance [ 533.117345] env[62096]: DEBUG oslo_concurrency.lockutils [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Acquiring lock "refresh_cache-84c97672-0027-43af-800d-a11c243e8825" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 533.117510] env[62096]: DEBUG oslo_concurrency.lockutils [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Acquired lock "refresh_cache-84c97672-0027-43af-800d-a11c243e8825" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 533.117672] env[62096]: DEBUG nova.network.neutron [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] [instance: 84c97672-0027-43af-800d-a11c243e8825] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 533.118088] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Expecting reply to msg 0dff34bcc0ed435d91234ea48fe9ddc8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 533.126239] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0dff34bcc0ed435d91234ea48fe9ddc8 [ 533.150945] env[62096]: DEBUG oslo_concurrency.lockutils [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.280s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 533.151438] env[62096]: DEBUG nova.compute.manager [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 533.153115] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Expecting reply to msg e8b104e6d28340038d0c885ff3e88133 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 533.154155] env[62096]: DEBUG oslo_concurrency.lockutils [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.799s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 533.155566] env[62096]: INFO nova.compute.claims [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 533.157408] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Expecting reply to msg 9499ca48d3d5465bae40d93d73f3d64b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 533.194951] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e8b104e6d28340038d0c885ff3e88133 [ 533.227516] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9499ca48d3d5465bae40d93d73f3d64b [ 533.391583] env[62096]: ERROR nova.compute.manager [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 00d94847-7f83-4bc0-ba71-e7943d2957af, please check neutron logs for more information. 
[ 533.391583] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 533.391583] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 533.391583] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 533.391583] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 533.391583] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 533.391583] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 533.391583] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 533.391583] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 533.391583] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 533.391583] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 533.391583] env[62096]: ERROR nova.compute.manager raise self.value [ 533.391583] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 533.391583] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 533.391583] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 533.391583] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 533.392040] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 533.392040] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 533.392040] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 00d94847-7f83-4bc0-ba71-e7943d2957af, please check neutron logs for more information. 
[ 533.392040] env[62096]: ERROR nova.compute.manager [ 533.392040] env[62096]: Traceback (most recent call last): [ 533.392040] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 533.392040] env[62096]: listener.cb(fileno) [ 533.392040] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 533.392040] env[62096]: result = function(*args, **kwargs) [ 533.392040] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 533.392040] env[62096]: return func(*args, **kwargs) [ 533.392040] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 533.392040] env[62096]: raise e [ 533.392040] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 533.392040] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 533.392040] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 533.392040] env[62096]: created_port_ids = self._update_ports_for_instance( [ 533.392040] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 533.392040] env[62096]: with excutils.save_and_reraise_exception(): [ 533.392040] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 533.392040] env[62096]: self.force_reraise() [ 533.392040] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 533.392040] env[62096]: raise self.value [ 533.392040] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 533.392040] env[62096]: updated_port = self._update_port( [ 533.392040] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 533.392040] env[62096]: _ensure_no_port_binding_failure(port) [ 533.392040] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 533.392040] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 533.392784] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 00d94847-7f83-4bc0-ba71-e7943d2957af, please check neutron logs for more information. [ 533.392784] env[62096]: Removing descriptor: 16 [ 533.392784] env[62096]: ERROR nova.compute.manager [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 00d94847-7f83-4bc0-ba71-e7943d2957af, please check neutron logs for more information. 
[ 533.392784] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] Traceback (most recent call last): [ 533.392784] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 533.392784] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] yield resources [ 533.392784] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 533.392784] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] self.driver.spawn(context, instance, image_meta, [ 533.392784] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 533.392784] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] self._vmops.spawn(context, instance, image_meta, injected_files, [ 533.392784] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 533.392784] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] vm_ref = self.build_virtual_machine(instance, [ 533.393151] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 533.393151] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] vif_infos = vmwarevif.get_vif_info(self._session, [ 533.393151] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 533.393151] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] for vif in network_info: [ 533.393151] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 533.393151] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] return self._sync_wrapper(fn, *args, **kwargs) [ 533.393151] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 533.393151] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] self.wait() [ 533.393151] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 533.393151] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] self[:] = self._gt.wait() [ 533.393151] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 533.393151] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] return self._exit_event.wait() [ 533.393151] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 533.393517] env[62096]: ERROR 
nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] result = hub.switch() [ 533.393517] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 533.393517] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] return self.greenlet.switch() [ 533.393517] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 533.393517] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] result = function(*args, **kwargs) [ 533.393517] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 533.393517] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] return func(*args, **kwargs) [ 533.393517] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 533.393517] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] raise e [ 533.393517] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 533.393517] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] nwinfo = self.network_api.allocate_for_instance( [ 533.393517] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 533.393517] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] created_port_ids = self._update_ports_for_instance( [ 533.393860] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 533.393860] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] with excutils.save_and_reraise_exception(): [ 533.393860] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 533.393860] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] self.force_reraise() [ 533.393860] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 533.393860] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] raise self.value [ 533.393860] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 533.393860] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] updated_port = self._update_port( [ 533.393860] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 533.393860] 
env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] _ensure_no_port_binding_failure(port) [ 533.393860] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 533.393860] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] raise exception.PortBindingFailed(port_id=port['id']) [ 533.394194] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] nova.exception.PortBindingFailed: Binding failed for port 00d94847-7f83-4bc0-ba71-e7943d2957af, please check neutron logs for more information. [ 533.394194] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] [ 533.394194] env[62096]: INFO nova.compute.manager [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] Terminating instance [ 533.396293] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Acquiring lock "refresh_cache-018ae11e-bc51-4e0b-8f2e-c464fca6f375" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 533.396448] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Acquired lock "refresh_cache-018ae11e-bc51-4e0b-8f2e-c464fca6f375" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 533.396609] env[62096]: DEBUG nova.network.neutron [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 533.397024] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Expecting reply to msg a8657489210c4c0d9c90e31bf753f841 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 533.406395] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a8657489210c4c0d9c90e31bf753f841 [ 533.643655] env[62096]: DEBUG nova.network.neutron [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] [instance: 84c97672-0027-43af-800d-a11c243e8825] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 533.665542] env[62096]: DEBUG nova.compute.utils [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 533.666206] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Expecting reply to msg d3449d5c4ea84957b29acc15b9068cd2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 533.667137] env[62096]: DEBUG nova.compute.manager [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 533.667299] env[62096]: DEBUG nova.network.neutron [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 533.671036] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Expecting reply to msg 52a76b90c60c4b74bef289da82817592 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 533.679100] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d3449d5c4ea84957b29acc15b9068cd2 [ 533.679630] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 52a76b90c60c4b74bef289da82817592 [ 533.822198] env[62096]: DEBUG nova.policy [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f039a4ed745a40d1b0d7ae0fb06b01fa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ee05c7980590469faa4ab4d4173395d8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 533.864764] env[62096]: DEBUG nova.network.neutron [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] [instance: 84c97672-0027-43af-800d-a11c243e8825] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 533.864764] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Expecting reply to msg 02165f840b9149c590fc3dde8b7ca32c in queue 
reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 533.885528] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 02165f840b9149c590fc3dde8b7ca32c [ 533.915877] env[62096]: DEBUG nova.network.neutron [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 534.003369] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Acquiring lock "9ebe9a03-3fc2-4a77-bb2b-d220e9693115" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 534.003610] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Lock "9ebe9a03-3fc2-4a77-bb2b-d220e9693115" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 534.004249] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Expecting reply to msg 94ea36d3f0f2459884fd338ec531d116 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 534.014717] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 94ea36d3f0f2459884fd338ec531d116 [ 534.025593] env[62096]: DEBUG nova.network.neutron [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 534.026295] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Expecting reply to msg cba2b547136849ef926432de755f4d1c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 534.038109] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cba2b547136849ef926432de755f4d1c [ 534.173384] env[62096]: DEBUG nova.compute.manager [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 534.175187] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Expecting reply to msg a3d9a96ce4404225b4503246af73dfb1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 534.218405] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a3d9a96ce4404225b4503246af73dfb1 [ 534.322633] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bde9d76-07d1-4f8b-9174-c3965515fa89 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.331484] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-079c9d67-f76f-4383-86d1-202fe5aa69e9 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.370177] env[62096]: DEBUG oslo_concurrency.lockutils [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Releasing lock "refresh_cache-84c97672-0027-43af-800d-a11c243e8825" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 534.370177] env[62096]: DEBUG nova.compute.manager [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] [instance: 84c97672-0027-43af-800d-a11c243e8825] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 534.370749] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] [instance: 84c97672-0027-43af-800d-a11c243e8825] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 534.371623] env[62096]: DEBUG nova.network.neutron [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] Successfully created port: aff25b36-e8c9-4587-8762-0c53e123f24e {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 534.374569] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-55a270dc-1514-4d72-9994-caef95c1b4c4 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.377518] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c95637f6-eff9-4de5-90c1-077f7ca27e3e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.389021] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97ba36f8-1aca-4de0-b24b-a00be25e91d4 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.421578] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-cb5cd147-8569-4787-991a-a6fca1349258 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.438200] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] [instance: 84c97672-0027-43af-800d-a11c243e8825] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 84c97672-0027-43af-800d-a11c243e8825 could not be found. [ 534.438445] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] [instance: 84c97672-0027-43af-800d-a11c243e8825] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 534.438670] env[62096]: INFO nova.compute.manager [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] [instance: 84c97672-0027-43af-800d-a11c243e8825] Took 0.07 seconds to destroy the instance on the hypervisor. [ 534.438938] env[62096]: DEBUG oslo.service.loopingcall [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 534.439591] env[62096]: DEBUG nova.compute.manager [-] [instance: 84c97672-0027-43af-800d-a11c243e8825] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 534.439688] env[62096]: DEBUG nova.network.neutron [-] [instance: 84c97672-0027-43af-800d-a11c243e8825] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 534.450693] env[62096]: DEBUG nova.compute.provider_tree [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 534.451397] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Expecting reply to msg c39612ba64ce429c8dd214d483a3c9f8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 534.461328] env[62096]: DEBUG nova.network.neutron [-] [instance: 84c97672-0027-43af-800d-a11c243e8825] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 534.461650] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 3f775c451a8e476a982090695d3de113 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 534.472191] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c39612ba64ce429c8dd214d483a3c9f8 [ 534.472191] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3f775c451a8e476a982090695d3de113 [ 534.508840] env[62096]: DEBUG nova.compute.manager [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 534.510553] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Expecting reply to msg e2202400e573413dbe1a8cdfbea653f8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 534.528861] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Releasing lock "refresh_cache-018ae11e-bc51-4e0b-8f2e-c464fca6f375" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 534.529261] env[62096]: DEBUG nova.compute.manager [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 534.529444] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 534.529718] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3623cc51-7090-490b-beee-e9f60e685df8 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.538842] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8870f8d-90e5-4289-a49b-9e91edb44fac {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.560756] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 018ae11e-bc51-4e0b-8f2e-c464fca6f375 could not be found. 
[ 534.560986] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 534.561166] env[62096]: INFO nova.compute.manager [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] Took 0.03 seconds to destroy the instance on the hypervisor. [ 534.561408] env[62096]: DEBUG oslo.service.loopingcall [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 534.562003] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e2202400e573413dbe1a8cdfbea653f8 [ 534.562394] env[62096]: DEBUG nova.compute.manager [-] [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 534.562492] env[62096]: DEBUG nova.network.neutron [-] [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 534.578100] env[62096]: DEBUG nova.network.neutron [-] [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 534.578606] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg b692492aa6dd4fffa2e5f71a3e3091ce in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 534.588474] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b692492aa6dd4fffa2e5f71a3e3091ce [ 534.683656] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Expecting reply to msg d2ad9fcab6b0485f86cd9bf9ec0ff516 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 534.728821] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d2ad9fcab6b0485f86cd9bf9ec0ff516 [ 534.761547] env[62096]: DEBUG nova.compute.manager [req-e83d738c-963e-4456-b147-989fe82f793f req-f76840eb-328e-430e-9cb5-f1d6d4468a39 service nova] [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] Received event network-changed-00d94847-7f83-4bc0-ba71-e7943d2957af {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 534.761751] env[62096]: DEBUG nova.compute.manager [req-e83d738c-963e-4456-b147-989fe82f793f req-f76840eb-328e-430e-9cb5-f1d6d4468a39 service nova] [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] Refreshing instance network info cache due to event network-changed-00d94847-7f83-4bc0-ba71-e7943d2957af. 
{{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 534.761967] env[62096]: DEBUG oslo_concurrency.lockutils [req-e83d738c-963e-4456-b147-989fe82f793f req-f76840eb-328e-430e-9cb5-f1d6d4468a39 service nova] Acquiring lock "refresh_cache-018ae11e-bc51-4e0b-8f2e-c464fca6f375" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 534.762106] env[62096]: DEBUG oslo_concurrency.lockutils [req-e83d738c-963e-4456-b147-989fe82f793f req-f76840eb-328e-430e-9cb5-f1d6d4468a39 service nova] Acquired lock "refresh_cache-018ae11e-bc51-4e0b-8f2e-c464fca6f375" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 534.762264] env[62096]: DEBUG nova.network.neutron [req-e83d738c-963e-4456-b147-989fe82f793f req-f76840eb-328e-430e-9cb5-f1d6d4468a39 service nova] [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] Refreshing network info cache for port 00d94847-7f83-4bc0-ba71-e7943d2957af {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 534.762698] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-e83d738c-963e-4456-b147-989fe82f793f req-f76840eb-328e-430e-9cb5-f1d6d4468a39 service nova] Expecting reply to msg 991fb110b1034600a467bd707afe627b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 534.770552] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 991fb110b1034600a467bd707afe627b [ 534.923903] env[62096]: DEBUG nova.compute.manager [req-2aa18b26-4e3c-4d72-81a2-82af69e41055 req-c2011557-cd40-4f6d-8c74-9232f803af21 service nova] [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] Received event network-vif-deleted-444cedaf-2406-4024-8e04-f6253a64d8ea {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 534.924183] env[62096]: DEBUG nova.compute.manager [req-2aa18b26-4e3c-4d72-81a2-82af69e41055 req-c2011557-cd40-4f6d-8c74-9232f803af21 service nova] [instance: 84c97672-0027-43af-800d-a11c243e8825] Received event network-changed-61df2824-e44b-4f03-936c-841be8a9b052 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 534.924352] env[62096]: DEBUG nova.compute.manager [req-2aa18b26-4e3c-4d72-81a2-82af69e41055 req-c2011557-cd40-4f6d-8c74-9232f803af21 service nova] [instance: 84c97672-0027-43af-800d-a11c243e8825] Refreshing instance network info cache due to event network-changed-61df2824-e44b-4f03-936c-841be8a9b052. 
{{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 534.924559] env[62096]: DEBUG oslo_concurrency.lockutils [req-2aa18b26-4e3c-4d72-81a2-82af69e41055 req-c2011557-cd40-4f6d-8c74-9232f803af21 service nova] Acquiring lock "refresh_cache-84c97672-0027-43af-800d-a11c243e8825" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 534.924694] env[62096]: DEBUG oslo_concurrency.lockutils [req-2aa18b26-4e3c-4d72-81a2-82af69e41055 req-c2011557-cd40-4f6d-8c74-9232f803af21 service nova] Acquired lock "refresh_cache-84c97672-0027-43af-800d-a11c243e8825" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 534.924870] env[62096]: DEBUG nova.network.neutron [req-2aa18b26-4e3c-4d72-81a2-82af69e41055 req-c2011557-cd40-4f6d-8c74-9232f803af21 service nova] [instance: 84c97672-0027-43af-800d-a11c243e8825] Refreshing network info cache for port 61df2824-e44b-4f03-936c-841be8a9b052 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 534.925288] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-2aa18b26-4e3c-4d72-81a2-82af69e41055 req-c2011557-cd40-4f6d-8c74-9232f803af21 service nova] Expecting reply to msg cb88051902e1477f83e59e6f42ce4151 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 534.932082] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cb88051902e1477f83e59e6f42ce4151 [ 534.963775] env[62096]: DEBUG nova.network.neutron [-] [instance: 84c97672-0027-43af-800d-a11c243e8825] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 534.968355] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg ebd597e764da4980ad864d202ed2bd8e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 534.975167] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ebd597e764da4980ad864d202ed2bd8e [ 534.976483] env[62096]: ERROR nova.scheduler.client.report [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [req-74259a65-02ce-4734-a40c-a8b12652a439] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 6eefe13c-ab55-4c03-987f-47a62756c3b3. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-74259a65-02ce-4734-a40c-a8b12652a439"}]} [ 534.995156] env[62096]: DEBUG nova.scheduler.client.report [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Refreshing inventories for resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 535.012728] env[62096]: DEBUG nova.scheduler.client.report [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Updating ProviderTree inventory for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 535.013179] env[62096]: DEBUG nova.compute.provider_tree [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 535.035002] env[62096]: DEBUG nova.scheduler.client.report [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Refreshing aggregate associations for resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3, aggregates: 9780ed25-c4c0-457d-a8e1-86bd787b331b {{(pid=62096) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 535.045319] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 535.060334] env[62096]: DEBUG nova.scheduler.client.report [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Refreshing trait associations for resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62096) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:825}} [ 535.085357] env[62096]: DEBUG nova.network.neutron [-] [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 535.085357] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 5e515e70475d49d7b2435f86bc808424 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 535.097887] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5e515e70475d49d7b2435f86bc808424 [ 535.187138] env[62096]: DEBUG nova.compute.manager [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] Start spawning the instance on the hypervisor. {{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 535.217435] env[62096]: DEBUG nova.virt.hardware [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 535.217665] env[62096]: DEBUG nova.virt.hardware [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 535.217821] env[62096]: DEBUG nova.virt.hardware [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 535.217998] env[62096]: DEBUG nova.virt.hardware [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 535.218140] env[62096]: DEBUG nova.virt.hardware [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 535.218288] env[62096]: DEBUG nova.virt.hardware [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 
tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 535.218537] env[62096]: DEBUG nova.virt.hardware [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 535.218808] env[62096]: DEBUG nova.virt.hardware [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 535.218808] env[62096]: DEBUG nova.virt.hardware [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 535.218959] env[62096]: DEBUG nova.virt.hardware [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 535.219101] env[62096]: DEBUG nova.virt.hardware [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 535.219952] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a12f96e-3efe-48e9-946c-9e17c6eb0f59 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.233036] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5bbe657-150c-41a7-bb36-83cff9c0f559 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.269925] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54efb024-33d3-415d-870d-2951167b34f8 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.278611] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ebad446-64fc-400e-9968-3bb8614cdf49 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.313939] env[62096]: DEBUG nova.network.neutron [req-e83d738c-963e-4456-b147-989fe82f793f req-f76840eb-328e-430e-9cb5-f1d6d4468a39 service nova] [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 535.315764] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d60e44c2-0b59-4096-bcff-7d1f082fd830 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.324789] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d25a89c-d803-4816-a546-0ac149ab2963 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.339402] env[62096]: DEBUG nova.compute.provider_tree [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 535.339944] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Expecting reply to msg cedba434804349ddb811e6fb88458347 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 535.352127] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cedba434804349ddb811e6fb88458347 [ 535.466246] env[62096]: INFO nova.compute.manager [-] [instance: 84c97672-0027-43af-800d-a11c243e8825] Took 1.03 seconds to deallocate network for instance. [ 535.468588] env[62096]: DEBUG nova.compute.claims [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] [instance: 84c97672-0027-43af-800d-a11c243e8825] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 535.468779] env[62096]: DEBUG oslo_concurrency.lockutils [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 535.592857] env[62096]: INFO nova.compute.manager [-] [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] Took 1.03 seconds to deallocate network for instance. 
[ 535.595177] env[62096]: DEBUG nova.compute.claims [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 535.595384] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 535.620358] env[62096]: DEBUG nova.network.neutron [req-2aa18b26-4e3c-4d72-81a2-82af69e41055 req-c2011557-cd40-4f6d-8c74-9232f803af21 service nova] [instance: 84c97672-0027-43af-800d-a11c243e8825] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 535.630495] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 535.630809] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 535.631061] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Starting heal instance info cache {{(pid=62096) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9926}} [ 535.631196] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Rebuilding the list of instances to heal {{(pid=62096) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 535.631780] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg eac164bf249c4283935812c471812a04 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 535.649133] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eac164bf249c4283935812c471812a04 [ 535.882818] env[62096]: DEBUG nova.scheduler.client.report [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Updated inventory for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with generation 11 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:957}} [ 535.883098] env[62096]: DEBUG nova.compute.provider_tree [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Updating resource provider 
6eefe13c-ab55-4c03-987f-47a62756c3b3 generation from 11 to 12 during operation: update_inventory {{(pid=62096) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 535.883284] env[62096]: DEBUG nova.compute.provider_tree [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 535.885840] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Expecting reply to msg 814bca8eb04d465e847f56fe6dddfc09 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 535.899443] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 814bca8eb04d465e847f56fe6dddfc09 [ 535.959719] env[62096]: DEBUG nova.network.neutron [req-e83d738c-963e-4456-b147-989fe82f793f req-f76840eb-328e-430e-9cb5-f1d6d4468a39 service nova] [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 535.959719] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-e83d738c-963e-4456-b147-989fe82f793f req-f76840eb-328e-430e-9cb5-f1d6d4468a39 service nova] Expecting reply to msg c008015046974d88b571afc01ec83974 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 535.977100] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c008015046974d88b571afc01ec83974 [ 536.116684] env[62096]: ERROR nova.compute.manager [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port aff25b36-e8c9-4587-8762-0c53e123f24e, please check neutron logs for more information. 
[ 536.116684] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 536.116684] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 536.116684] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 536.116684] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 536.116684] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 536.116684] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 536.116684] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 536.116684] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 536.116684] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 536.116684] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 536.116684] env[62096]: ERROR nova.compute.manager raise self.value [ 536.116684] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 536.116684] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 536.116684] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 536.116684] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 536.117154] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 536.117154] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 536.117154] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port aff25b36-e8c9-4587-8762-0c53e123f24e, please check neutron logs for more information. 
[ 536.117154] env[62096]: ERROR nova.compute.manager [ 536.117154] env[62096]: Traceback (most recent call last): [ 536.117154] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 536.117154] env[62096]: listener.cb(fileno) [ 536.117154] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 536.117154] env[62096]: result = function(*args, **kwargs) [ 536.117154] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 536.117154] env[62096]: return func(*args, **kwargs) [ 536.117154] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 536.117154] env[62096]: raise e [ 536.117154] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 536.117154] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 536.117154] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 536.117154] env[62096]: created_port_ids = self._update_ports_for_instance( [ 536.117154] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 536.117154] env[62096]: with excutils.save_and_reraise_exception(): [ 536.117154] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 536.117154] env[62096]: self.force_reraise() [ 536.117154] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 536.117154] env[62096]: raise self.value [ 536.117154] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 536.117154] env[62096]: updated_port = self._update_port( [ 536.117154] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 536.117154] env[62096]: _ensure_no_port_binding_failure(port) [ 536.117154] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 536.117154] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 536.117889] env[62096]: nova.exception.PortBindingFailed: Binding failed for port aff25b36-e8c9-4587-8762-0c53e123f24e, please check neutron logs for more information. [ 536.117889] env[62096]: Removing descriptor: 14 [ 536.117889] env[62096]: ERROR nova.compute.manager [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port aff25b36-e8c9-4587-8762-0c53e123f24e, please check neutron logs for more information. 
[ 536.117889] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] Traceback (most recent call last): [ 536.117889] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 536.117889] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] yield resources [ 536.117889] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 536.117889] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] self.driver.spawn(context, instance, image_meta, [ 536.117889] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 536.117889] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 536.117889] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 536.117889] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] vm_ref = self.build_virtual_machine(instance, [ 536.118215] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 536.118215] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] vif_infos = vmwarevif.get_vif_info(self._session, [ 536.118215] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 536.118215] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] for vif in network_info: [ 536.118215] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 536.118215] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] return self._sync_wrapper(fn, *args, **kwargs) [ 536.118215] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 536.118215] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] self.wait() [ 536.118215] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 536.118215] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] self[:] = self._gt.wait() [ 536.118215] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 536.118215] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] return self._exit_event.wait() [ 536.118215] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 536.118603] env[62096]: ERROR 
nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] result = hub.switch() [ 536.118603] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 536.118603] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] return self.greenlet.switch() [ 536.118603] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 536.118603] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] result = function(*args, **kwargs) [ 536.118603] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 536.118603] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] return func(*args, **kwargs) [ 536.118603] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 536.118603] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] raise e [ 536.118603] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 536.118603] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] nwinfo = self.network_api.allocate_for_instance( [ 536.118603] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 536.118603] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] created_port_ids = self._update_ports_for_instance( [ 536.118974] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 536.118974] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] with excutils.save_and_reraise_exception(): [ 536.118974] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 536.118974] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] self.force_reraise() [ 536.118974] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 536.118974] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] raise self.value [ 536.118974] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 536.118974] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] updated_port = self._update_port( [ 536.118974] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 536.118974] 
env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] _ensure_no_port_binding_failure(port) [ 536.118974] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 536.118974] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] raise exception.PortBindingFailed(port_id=port['id']) [ 536.119301] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] nova.exception.PortBindingFailed: Binding failed for port aff25b36-e8c9-4587-8762-0c53e123f24e, please check neutron logs for more information. [ 536.119301] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] [ 536.119301] env[62096]: INFO nova.compute.manager [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] Terminating instance [ 536.120029] env[62096]: DEBUG oslo_concurrency.lockutils [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Acquiring lock "refresh_cache-89eb1f17-1d36-4b6f-8102-04708d55a81c" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 536.121402] env[62096]: DEBUG oslo_concurrency.lockutils [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Acquired lock "refresh_cache-89eb1f17-1d36-4b6f-8102-04708d55a81c" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 536.121402] env[62096]: DEBUG nova.network.neutron [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 536.121402] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Expecting reply to msg b9d8e21b3193480db5d9966b2d053c2d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 536.127685] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b9d8e21b3193480db5d9966b2d053c2d [ 536.132114] env[62096]: DEBUG nova.network.neutron [req-2aa18b26-4e3c-4d72-81a2-82af69e41055 req-c2011557-cd40-4f6d-8c74-9232f803af21 service nova] [instance: 84c97672-0027-43af-800d-a11c243e8825] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 536.132114] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-2aa18b26-4e3c-4d72-81a2-82af69e41055 req-c2011557-cd40-4f6d-8c74-9232f803af21 service nova] Expecting reply to msg f7713bdc44114eeea932b45a2049e039 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 536.136233] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] Skipping network cache update for 
instance because it is Building. {{(pid=62096) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 536.136233] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] Skipping network cache update for instance because it is Building. {{(pid=62096) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 536.136322] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] [instance: 84c97672-0027-43af-800d-a11c243e8825] Skipping network cache update for instance because it is Building. {{(pid=62096) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 536.136419] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] Skipping network cache update for instance because it is Building. {{(pid=62096) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 536.136537] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] Skipping network cache update for instance because it is Building. {{(pid=62096) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 536.136652] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Didn't find any instances for network info cache update. {{(pid=62096) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10012}} [ 536.137062] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 536.137305] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 536.137490] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 536.137666] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 536.137926] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 536.138124] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 536.138293] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] CONF.reclaim_instance_interval 
<= 0, skipping... {{(pid=62096) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10545}} [ 536.138433] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 536.138740] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 2d1612e732da4fd696f3ecca5961398b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 536.148603] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f7713bdc44114eeea932b45a2049e039 [ 536.148846] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2d1612e732da4fd696f3ecca5961398b [ 536.388751] env[62096]: DEBUG oslo_concurrency.lockutils [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.234s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 536.389285] env[62096]: DEBUG nova.compute.manager [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] Start building networks asynchronously for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 536.390946] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Expecting reply to msg a130dcafc1b74ccb9034aec8056d4ffc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 536.392051] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.026s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 536.393273] env[62096]: INFO nova.compute.claims [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 536.394788] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Expecting reply to msg 0add445ee9b04bac938147adef66b3ca in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 536.426252] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a130dcafc1b74ccb9034aec8056d4ffc [ 536.441763] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0add445ee9b04bac938147adef66b3ca [ 536.463115] env[62096]: DEBUG oslo_concurrency.lockutils [req-e83d738c-963e-4456-b147-989fe82f793f req-f76840eb-328e-430e-9cb5-f1d6d4468a39 
service nova] Releasing lock "refresh_cache-018ae11e-bc51-4e0b-8f2e-c464fca6f375" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 536.632885] env[62096]: DEBUG oslo_concurrency.lockutils [req-2aa18b26-4e3c-4d72-81a2-82af69e41055 req-c2011557-cd40-4f6d-8c74-9232f803af21 service nova] Releasing lock "refresh_cache-84c97672-0027-43af-800d-a11c243e8825" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 536.633126] env[62096]: DEBUG nova.compute.manager [req-2aa18b26-4e3c-4d72-81a2-82af69e41055 req-c2011557-cd40-4f6d-8c74-9232f803af21 service nova] [instance: 84c97672-0027-43af-800d-a11c243e8825] Received event network-vif-deleted-61df2824-e44b-4f03-936c-841be8a9b052 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 536.643236] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 536.671277] env[62096]: DEBUG nova.network.neutron [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 536.898236] env[62096]: DEBUG nova.compute.utils [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 536.898884] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Expecting reply to msg ed24a3c6bfcd49fe81ef6666152b4cb8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 536.901014] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Expecting reply to msg fb1948cbfdc04669bbd7ed1a6cf9914e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 536.901845] env[62096]: DEBUG nova.compute.manager [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 536.906856] env[62096]: DEBUG nova.network.neutron [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 536.906856] env[62096]: DEBUG nova.network.neutron [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 536.908117] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Expecting reply to msg edefcd2d31bc4f27906514d089311e33 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 536.913096] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ed24a3c6bfcd49fe81ef6666152b4cb8 [ 536.914338] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fb1948cbfdc04669bbd7ed1a6cf9914e [ 536.917699] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg edefcd2d31bc4f27906514d089311e33 [ 537.060056] env[62096]: DEBUG nova.policy [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5ccdfb211bb74d039d058dab4d4ab0d8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '080433d09b5f4a62a950fd1278aff52a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 537.404049] env[62096]: DEBUG nova.compute.manager [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 537.405779] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Expecting reply to msg e9bf0d1eb37a4ef5a92a1737437f59b3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 537.410483] env[62096]: DEBUG oslo_concurrency.lockutils [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Releasing lock "refresh_cache-89eb1f17-1d36-4b6f-8102-04708d55a81c" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 537.410483] env[62096]: DEBUG nova.compute.manager [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 537.410751] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 537.411143] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-307e86ee-dcf4-44a8-b370-a478d1c7e98d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.429532] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f83d2cf-897e-45a7-b9be-5382603ade3a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.455289] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 89eb1f17-1d36-4b6f-8102-04708d55a81c could not be found. [ 537.455507] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 537.455689] env[62096]: INFO nova.compute.manager [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] Took 0.05 seconds to destroy the instance on the hypervisor. [ 537.455935] env[62096]: DEBUG oslo.service.loopingcall [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 537.456203] env[62096]: DEBUG nova.compute.manager [-] [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 537.456271] env[62096]: DEBUG nova.network.neutron [-] [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 537.467167] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e9bf0d1eb37a4ef5a92a1737437f59b3 [ 537.490598] env[62096]: DEBUG nova.network.neutron [-] [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 537.491125] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 76d46f4040ea4df8b2d176765fe238cd in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 537.522227] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 76d46f4040ea4df8b2d176765fe238cd [ 537.651476] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3199fc28-f387-45b7-b7a5-84bfb4e84830 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.660208] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40c79f34-f9b2-4826-8ba1-07b90efd03ab {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.692729] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0e4b8c7-40fe-48c4-9283-7fef2f2300f9 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.701224] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e28b8d89-7710-4db8-8c8b-78e3dfb75867 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.716148] env[62096]: DEBUG nova.compute.provider_tree [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 537.716628] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Expecting reply to msg 8915812c963e4682848c8878339b3464 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 537.720310] env[62096]: DEBUG nova.compute.manager [req-0a5c4063-603e-4e14-aac4-be4e25ec7074 req-d6ca89fd-c448-475d-9b09-8926508aab94 service nova] [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] Received event network-vif-deleted-00d94847-7f83-4bc0-ba71-e7943d2957af {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 537.723731] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8915812c963e4682848c8878339b3464 [ 537.806241] env[62096]: DEBUG 
nova.compute.manager [req-831fbd3c-69ee-4ad9-a695-7d7eb4df8961 req-ee67cf9e-56b2-486c-b6d9-a9ec9357aeee service nova] [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] Received event network-changed-aff25b36-e8c9-4587-8762-0c53e123f24e {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 537.806473] env[62096]: DEBUG nova.compute.manager [req-831fbd3c-69ee-4ad9-a695-7d7eb4df8961 req-ee67cf9e-56b2-486c-b6d9-a9ec9357aeee service nova] [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] Refreshing instance network info cache due to event network-changed-aff25b36-e8c9-4587-8762-0c53e123f24e. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 537.806683] env[62096]: DEBUG oslo_concurrency.lockutils [req-831fbd3c-69ee-4ad9-a695-7d7eb4df8961 req-ee67cf9e-56b2-486c-b6d9-a9ec9357aeee service nova] Acquiring lock "refresh_cache-89eb1f17-1d36-4b6f-8102-04708d55a81c" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 537.806816] env[62096]: DEBUG oslo_concurrency.lockutils [req-831fbd3c-69ee-4ad9-a695-7d7eb4df8961 req-ee67cf9e-56b2-486c-b6d9-a9ec9357aeee service nova] Acquired lock "refresh_cache-89eb1f17-1d36-4b6f-8102-04708d55a81c" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 537.806964] env[62096]: DEBUG nova.network.neutron [req-831fbd3c-69ee-4ad9-a695-7d7eb4df8961 req-ee67cf9e-56b2-486c-b6d9-a9ec9357aeee service nova] [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] Refreshing network info cache for port aff25b36-e8c9-4587-8762-0c53e123f24e {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 537.807622] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-831fbd3c-69ee-4ad9-a695-7d7eb4df8961 req-ee67cf9e-56b2-486c-b6d9-a9ec9357aeee service nova] Expecting reply to msg 78f29d7015c746ecb9e9c642bc4909c0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 537.815245] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 78f29d7015c746ecb9e9c642bc4909c0 [ 537.913774] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Expecting reply to msg 19039ced4a2842e99322597b3fc23ee0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 537.955526] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 19039ced4a2842e99322597b3fc23ee0 [ 538.006404] env[62096]: DEBUG nova.network.neutron [-] [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 538.006404] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg ce31eb9160f745ceb15540fc5f6370c5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 538.016190] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ce31eb9160f745ceb15540fc5f6370c5 [ 538.167317] env[62096]: DEBUG nova.network.neutron [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] Successfully created port: 61b8e775-6352-4a95-8d43-acb5384ff45e {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 
538.219389] env[62096]: DEBUG nova.scheduler.client.report [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 538.221749] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Expecting reply to msg d7b18af6362a4cd6ac0c4a7aea548bf0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 538.234615] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d7b18af6362a4cd6ac0c4a7aea548bf0 [ 538.341307] env[62096]: DEBUG nova.network.neutron [req-831fbd3c-69ee-4ad9-a695-7d7eb4df8961 req-ee67cf9e-56b2-486c-b6d9-a9ec9357aeee service nova] [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 538.418503] env[62096]: DEBUG nova.compute.manager [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 538.440484] env[62096]: DEBUG nova.virt.hardware [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 538.440791] env[62096]: DEBUG nova.virt.hardware [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 538.441005] env[62096]: DEBUG nova.virt.hardware [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 538.441256] env[62096]: DEBUG nova.virt.hardware [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 538.441508] env[62096]: DEBUG nova.virt.hardware [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 538.441780] env[62096]: DEBUG nova.virt.hardware [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 538.442085] env[62096]: DEBUG nova.virt.hardware [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 538.442300] env[62096]: DEBUG nova.virt.hardware [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 538.442500] env[62096]: DEBUG nova.virt.hardware [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 538.442746] env[62096]: DEBUG nova.virt.hardware [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 538.443523] env[62096]: DEBUG nova.virt.hardware [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 538.444168] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bb48326-8fe9-4de3-a660-151b60ebb965 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.454162] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3677804d-95f0-466b-baa8-9035c8de5ade {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.509094] env[62096]: INFO nova.compute.manager [-] [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] Took 1.05 seconds to deallocate network for instance. 
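Note: the network deallocation above ("Waiting for function ComputeManager._try_deallocate_network ... _deallocate_network_with_retries to return", logged from oslo.service's loopingcall module) runs the Neutron cleanup through a retrying helper before the "Took 1.05 seconds to deallocate network" line is emitted. As a rough, hedged illustration of the blocking start()/wait() idiom that module provides, the sketch below uses FixedIntervalLoopingCall; the retry body and counter are invented stand-ins, and this is not Nova's exact helper.

from oslo_service import loopingcall

attempts = {"count": 0}

def _deallocate_with_retries():
    # Stand-in for the nested retry function named in the log; the real one
    # calls the Neutron API and retries on transient failures.
    attempts["count"] += 1
    if attempts["count"] < 3:
        return  # returning normally lets the loop run again on the next interval
    raise loopingcall.LoopingCallDone()  # stops the loop and unblocks wait()

timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
timer.start(interval=1.0).wait()  # blocks the caller until LoopingCallDone is raised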
[ 538.511832] env[62096]: DEBUG nova.compute.claims [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 538.512093] env[62096]: DEBUG oslo_concurrency.lockutils [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 538.568846] env[62096]: DEBUG nova.network.neutron [req-831fbd3c-69ee-4ad9-a695-7d7eb4df8961 req-ee67cf9e-56b2-486c-b6d9-a9ec9357aeee service nova] [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 538.568846] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-831fbd3c-69ee-4ad9-a695-7d7eb4df8961 req-ee67cf9e-56b2-486c-b6d9-a9ec9357aeee service nova] Expecting reply to msg e05a859770cd4b909469a5384264907d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 538.579327] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e05a859770cd4b909469a5384264907d [ 538.732927] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.333s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 538.732927] env[62096]: DEBUG nova.compute.manager [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 538.734098] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Expecting reply to msg e36bad4757574ef38e261e4355fefad6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 538.737032] env[62096]: DEBUG oslo_concurrency.lockutils [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.465s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 538.737164] env[62096]: INFO nova.compute.claims [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 538.738947] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Expecting reply to msg d7554b09d26b4ebf9387ac16fdfce79d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 538.809916] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e36bad4757574ef38e261e4355fefad6 [ 538.822914] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d7554b09d26b4ebf9387ac16fdfce79d [ 539.080467] env[62096]: DEBUG oslo_concurrency.lockutils [req-831fbd3c-69ee-4ad9-a695-7d7eb4df8961 req-ee67cf9e-56b2-486c-b6d9-a9ec9357aeee service nova] Releasing lock "refresh_cache-89eb1f17-1d36-4b6f-8102-04708d55a81c" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 539.080467] env[62096]: DEBUG nova.compute.manager [req-831fbd3c-69ee-4ad9-a695-7d7eb4df8961 req-ee67cf9e-56b2-486c-b6d9-a9ec9357aeee service nova] [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] Received event network-vif-deleted-aff25b36-e8c9-4587-8762-0c53e123f24e {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 539.243353] env[62096]: DEBUG nova.compute.utils [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 539.244077] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Expecting reply to msg 0597fb2ab1944077ae9bbb22fbc47e1c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 539.248163] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Expecting reply to msg 8274b68dd2a344ab816b7e3359ac77b1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 539.248163] env[62096]: DEBUG nova.compute.manager [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 
tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 539.248163] env[62096]: DEBUG nova.network.neutron [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 539.256320] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0597fb2ab1944077ae9bbb22fbc47e1c [ 539.259340] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8274b68dd2a344ab816b7e3359ac77b1 [ 539.289698] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Acquiring lock "773f55a5-d40c-445e-b02a-268d4f88fdd7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 539.289939] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Lock "773f55a5-d40c-445e-b02a-268d4f88fdd7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 539.290455] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Expecting reply to msg 96d9a80f5a724896a9c3cec1138abf3d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 539.302924] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 96d9a80f5a724896a9c3cec1138abf3d [ 539.341955] env[62096]: DEBUG nova.policy [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '376f438cf2804dbd968c717eecaeaffc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4535705f87e34bc89806c4c3182a3fbb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 539.747841] env[62096]: DEBUG nova.compute.manager [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 539.749587] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Expecting reply to msg ad58277531224f25a3cbd7b064e134a1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 539.793008] env[62096]: DEBUG nova.compute.manager [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 539.794862] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Expecting reply to msg ce1d193a96ba4327872a775fbacc6af2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 539.796418] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ad58277531224f25a3cbd7b064e134a1 [ 539.852619] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ce1d193a96ba4327872a775fbacc6af2 [ 539.921421] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf9c9401-21b8-45e4-a7de-333435a66de5 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.928883] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68f5e4d2-1804-48bc-9ab4-dac3e6910920 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.957762] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9580dd7a-aec7-4268-99fa-7e35fb75087a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.965008] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3af3f19-3908-49e2-ad00-c0f8034a4d27 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.977680] env[62096]: DEBUG nova.compute.provider_tree [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 539.978184] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Expecting reply to msg 3d25f3a64cc4476e96a6b0697e8300d0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 539.986919] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3d25f3a64cc4476e96a6b0697e8300d0 [ 540.256588] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Expecting reply to msg 
58cc1037bcf84a088853fdecd78ca806 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 540.294196] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 58cc1037bcf84a088853fdecd78ca806 [ 540.322465] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 540.480834] env[62096]: DEBUG nova.scheduler.client.report [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 540.483290] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Expecting reply to msg 96e2911218db4e19a13cd3f1105243f9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 540.498087] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 96e2911218db4e19a13cd3f1105243f9 [ 540.582583] env[62096]: DEBUG nova.network.neutron [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] Successfully created port: cc3376b5-d3cb-420a-804b-c6a03e40dccd {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 540.758978] env[62096]: DEBUG nova.compute.manager [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 540.783504] env[62096]: DEBUG nova.virt.hardware [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 540.783756] env[62096]: DEBUG nova.virt.hardware [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 540.783943] env[62096]: DEBUG nova.virt.hardware [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 540.784174] env[62096]: DEBUG nova.virt.hardware [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 540.784333] env[62096]: DEBUG nova.virt.hardware [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 540.784481] env[62096]: DEBUG nova.virt.hardware [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 540.784694] env[62096]: DEBUG nova.virt.hardware [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 540.784881] env[62096]: DEBUG nova.virt.hardware [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 540.785057] env[62096]: DEBUG nova.virt.hardware [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 540.785227] env[62096]: DEBUG nova.virt.hardware [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 540.785403] env[62096]: DEBUG nova.virt.hardware [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 540.786351] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21d27e4c-4b95-4f3c-aadf-ea13ffa79ec3 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.795489] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4193e22c-af5a-44a3-b996-0cc96a2dd9e3 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.988463] env[62096]: DEBUG oslo_concurrency.lockutils [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.252s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 540.988463] env[62096]: DEBUG nova.compute.manager [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 540.989900] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Expecting reply to msg 7075bf8bf3e144aba081ecc1c265beb6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 540.990956] env[62096]: DEBUG oslo_concurrency.lockutils [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 8.944s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 540.992768] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Expecting reply to msg 65766d09542d4c94b5914cb2a02490ab in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 541.040068] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7075bf8bf3e144aba081ecc1c265beb6 [ 541.044124] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 65766d09542d4c94b5914cb2a02490ab [ 541.089670] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Acquiring lock "3530e93e-f729-471b-976b-e52c3182cb15" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 541.089893] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Lock "3530e93e-f729-471b-976b-e52c3182cb15" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 541.090353] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Expecting reply to msg 0135cc25a5a44767bc457c55070e8410 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 541.101078] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0135cc25a5a44767bc457c55070e8410 [ 541.366890] env[62096]: ERROR nova.compute.manager [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 61b8e775-6352-4a95-8d43-acb5384ff45e, please check neutron logs for more information. 
[ 541.366890] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 541.366890] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 541.366890] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 541.366890] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 541.366890] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 541.366890] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 541.366890] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 541.366890] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 541.366890] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 541.366890] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 541.366890] env[62096]: ERROR nova.compute.manager raise self.value [ 541.366890] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 541.366890] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 541.366890] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 541.366890] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 541.367307] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 541.367307] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 541.367307] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 61b8e775-6352-4a95-8d43-acb5384ff45e, please check neutron logs for more information. 
[ 541.367307] env[62096]: ERROR nova.compute.manager [ 541.367307] env[62096]: Traceback (most recent call last): [ 541.367307] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 541.367307] env[62096]: listener.cb(fileno) [ 541.367307] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 541.367307] env[62096]: result = function(*args, **kwargs) [ 541.367307] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 541.367307] env[62096]: return func(*args, **kwargs) [ 541.367307] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 541.367307] env[62096]: raise e [ 541.367307] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 541.367307] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 541.367307] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 541.367307] env[62096]: created_port_ids = self._update_ports_for_instance( [ 541.367307] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 541.367307] env[62096]: with excutils.save_and_reraise_exception(): [ 541.367307] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 541.367307] env[62096]: self.force_reraise() [ 541.367307] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 541.367307] env[62096]: raise self.value [ 541.367307] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 541.367307] env[62096]: updated_port = self._update_port( [ 541.367307] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 541.367307] env[62096]: _ensure_no_port_binding_failure(port) [ 541.367307] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 541.367307] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 541.367987] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 61b8e775-6352-4a95-8d43-acb5384ff45e, please check neutron logs for more information. [ 541.367987] env[62096]: Removing descriptor: 14 [ 541.367987] env[62096]: ERROR nova.compute.manager [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 61b8e775-6352-4a95-8d43-acb5384ff45e, please check neutron logs for more information. 
[ 541.367987] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] Traceback (most recent call last): [ 541.367987] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 541.367987] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] yield resources [ 541.367987] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 541.367987] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] self.driver.spawn(context, instance, image_meta, [ 541.367987] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 541.367987] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] self._vmops.spawn(context, instance, image_meta, injected_files, [ 541.367987] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 541.367987] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] vm_ref = self.build_virtual_machine(instance, [ 541.368686] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 541.368686] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] vif_infos = vmwarevif.get_vif_info(self._session, [ 541.368686] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 541.368686] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] for vif in network_info: [ 541.368686] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 541.368686] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] return self._sync_wrapper(fn, *args, **kwargs) [ 541.368686] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 541.368686] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] self.wait() [ 541.368686] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 541.368686] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] self[:] = self._gt.wait() [ 541.368686] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 541.368686] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] return self._exit_event.wait() [ 541.368686] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 541.369161] env[62096]: ERROR 
nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] result = hub.switch() [ 541.369161] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 541.369161] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] return self.greenlet.switch() [ 541.369161] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 541.369161] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] result = function(*args, **kwargs) [ 541.369161] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 541.369161] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] return func(*args, **kwargs) [ 541.369161] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 541.369161] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] raise e [ 541.369161] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 541.369161] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] nwinfo = self.network_api.allocate_for_instance( [ 541.369161] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 541.369161] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] created_port_ids = self._update_ports_for_instance( [ 541.369626] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 541.369626] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] with excutils.save_and_reraise_exception(): [ 541.369626] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 541.369626] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] self.force_reraise() [ 541.369626] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 541.369626] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] raise self.value [ 541.369626] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 541.369626] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] updated_port = self._update_port( [ 541.369626] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 541.369626] 
env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] _ensure_no_port_binding_failure(port) [ 541.369626] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 541.369626] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] raise exception.PortBindingFailed(port_id=port['id']) [ 541.371107] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] nova.exception.PortBindingFailed: Binding failed for port 61b8e775-6352-4a95-8d43-acb5384ff45e, please check neutron logs for more information. [ 541.371107] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] [ 541.371107] env[62096]: INFO nova.compute.manager [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] Terminating instance [ 541.371107] env[62096]: DEBUG oslo_concurrency.lockutils [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Acquiring lock "refresh_cache-5ea9e99b-dffd-4b11-97ff-634551c00745" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 541.371107] env[62096]: DEBUG oslo_concurrency.lockutils [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Acquired lock "refresh_cache-5ea9e99b-dffd-4b11-97ff-634551c00745" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 541.371107] env[62096]: DEBUG nova.network.neutron [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 541.371508] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Expecting reply to msg b400b89333784cf89f9eb46edc436c80 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 541.379185] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b400b89333784cf89f9eb46edc436c80 [ 541.495857] env[62096]: DEBUG nova.compute.utils [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 541.496566] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Expecting reply to msg b5b58ce6da6340969f96a831f93d5285 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 541.502723] env[62096]: DEBUG nova.compute.manager [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 
tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 541.502723] env[62096]: DEBUG nova.network.neutron [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 541.508627] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b5b58ce6da6340969f96a831f93d5285 [ 541.592327] env[62096]: DEBUG nova.compute.manager [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] [instance: 3530e93e-f729-471b-976b-e52c3182cb15] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 541.594245] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Expecting reply to msg 9e407b96e95449458af986727f37f9cc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 541.606998] env[62096]: DEBUG nova.policy [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c73f8b132ee4403396fc0b51ebeed403', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1f1b0a2cd20340da82883f2db008316c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 541.635695] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9e407b96e95449458af986727f37f9cc [ 541.749125] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-177ececa-e328-49e2-a058-a67567d39ffd {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.757730] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22e2f199-278f-4aa1-af3a-5afc1b9f2360 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.791290] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7642623-ff71-4fad-921e-814688ed2380 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.799218] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68b52771-44c8-4fa4-b3ca-7931e2ccd965 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.814543] env[62096]: DEBUG nova.compute.provider_tree [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 
tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 541.815122] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Expecting reply to msg 76d7968f32454ce4a23c1a0cbe9e8733 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 541.836142] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 76d7968f32454ce4a23c1a0cbe9e8733 [ 541.930567] env[62096]: DEBUG nova.network.neutron [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 542.001706] env[62096]: DEBUG nova.compute.manager [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] Start building block device mappings for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 542.003473] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Expecting reply to msg 7efc709747c441dcbc2e606004f08e3d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 542.063042] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7efc709747c441dcbc2e606004f08e3d [ 542.116636] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 542.257007] env[62096]: DEBUG nova.network.neutron [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 542.257503] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Expecting reply to msg 1c5954ec18d64dc39c6e2d7e07d9028d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 542.268415] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1c5954ec18d64dc39c6e2d7e07d9028d [ 542.317417] env[62096]: DEBUG nova.scheduler.client.report [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on 
inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 542.319704] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Expecting reply to msg 67925f0521d34e2789c3b61912619754 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 542.331562] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 67925f0521d34e2789c3b61912619754 [ 542.477177] env[62096]: DEBUG nova.network.neutron [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] Successfully created port: 76965fb1-e16e-43bd-854f-ab0f78dc111e {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 542.509707] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Expecting reply to msg 2d68d4e544e1472d8c8abc94ef8cfd94 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 542.546520] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2d68d4e544e1472d8c8abc94ef8cfd94 [ 542.713034] env[62096]: DEBUG nova.compute.manager [req-545b6f98-2607-4e93-830b-2fa0c189ba16 req-0062759c-8bab-45e4-b966-f213b088c254 service nova] [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] Received event network-changed-61b8e775-6352-4a95-8d43-acb5384ff45e {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 542.713034] env[62096]: DEBUG nova.compute.manager [req-545b6f98-2607-4e93-830b-2fa0c189ba16 req-0062759c-8bab-45e4-b966-f213b088c254 service nova] [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] Refreshing instance network info cache due to event network-changed-61b8e775-6352-4a95-8d43-acb5384ff45e. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 542.713034] env[62096]: DEBUG oslo_concurrency.lockutils [req-545b6f98-2607-4e93-830b-2fa0c189ba16 req-0062759c-8bab-45e4-b966-f213b088c254 service nova] Acquiring lock "refresh_cache-5ea9e99b-dffd-4b11-97ff-634551c00745" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 542.760188] env[62096]: DEBUG oslo_concurrency.lockutils [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Releasing lock "refresh_cache-5ea9e99b-dffd-4b11-97ff-634551c00745" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 542.760631] env[62096]: DEBUG nova.compute.manager [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 542.760819] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 542.761170] env[62096]: DEBUG oslo_concurrency.lockutils [req-545b6f98-2607-4e93-830b-2fa0c189ba16 req-0062759c-8bab-45e4-b966-f213b088c254 service nova] Acquired lock "refresh_cache-5ea9e99b-dffd-4b11-97ff-634551c00745" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 542.761812] env[62096]: DEBUG nova.network.neutron [req-545b6f98-2607-4e93-830b-2fa0c189ba16 req-0062759c-8bab-45e4-b966-f213b088c254 service nova] [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] Refreshing network info cache for port 61b8e775-6352-4a95-8d43-acb5384ff45e {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 542.761812] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-545b6f98-2607-4e93-830b-2fa0c189ba16 req-0062759c-8bab-45e4-b966-f213b088c254 service nova] Expecting reply to msg 923e6bb78f4943b8a9cb17bb0a661338 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 542.762661] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-07f5f2b8-0ebb-4747-b500-d12b2cb65491 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.779522] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88ae7513-6773-4065-998c-b9542061d086 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.791274] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 923e6bb78f4943b8a9cb17bb0a661338 [ 542.803606] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5ea9e99b-dffd-4b11-97ff-634551c00745 could not be found. [ 542.803889] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 542.804619] env[62096]: INFO nova.compute.manager [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] Took 0.04 seconds to destroy the instance on the hypervisor. [ 542.804937] env[62096]: DEBUG oslo.service.loopingcall [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 542.805255] env[62096]: DEBUG nova.compute.manager [-] [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 542.805354] env[62096]: DEBUG nova.network.neutron [-] [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 542.822629] env[62096]: DEBUG oslo_concurrency.lockutils [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.832s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 542.823296] env[62096]: ERROR nova.compute.manager [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 444cedaf-2406-4024-8e04-f6253a64d8ea, please check neutron logs for more information. [ 542.823296] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] Traceback (most recent call last): [ 542.823296] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 542.823296] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] self.driver.spawn(context, instance, image_meta, [ 542.823296] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 542.823296] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 542.823296] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 542.823296] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] vm_ref = self.build_virtual_machine(instance, [ 542.823296] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 542.823296] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] vif_infos = vmwarevif.get_vif_info(self._session, [ 542.823296] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 542.823634] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] for vif in network_info: [ 542.823634] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 542.823634] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] return self._sync_wrapper(fn, *args, **kwargs) [ 542.823634] env[62096]: ERROR 
nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 542.823634] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] self.wait() [ 542.823634] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 542.823634] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] self[:] = self._gt.wait() [ 542.823634] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 542.823634] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] return self._exit_event.wait() [ 542.823634] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 542.823634] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] result = hub.switch() [ 542.823634] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 542.823634] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] return self.greenlet.switch() [ 542.823976] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 542.823976] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] result = function(*args, **kwargs) [ 542.823976] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 542.823976] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] return func(*args, **kwargs) [ 542.823976] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 542.823976] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] raise e [ 542.823976] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 542.823976] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] nwinfo = self.network_api.allocate_for_instance( [ 542.823976] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 542.823976] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] created_port_ids = self._update_ports_for_instance( [ 542.823976] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 542.823976] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] with excutils.save_and_reraise_exception(): [ 542.823976] env[62096]: ERROR nova.compute.manager 
[instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 542.824316] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] self.force_reraise() [ 542.824316] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 542.824316] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] raise self.value [ 542.824316] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 542.824316] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] updated_port = self._update_port( [ 542.824316] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 542.824316] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] _ensure_no_port_binding_failure(port) [ 542.824316] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 542.824316] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] raise exception.PortBindingFailed(port_id=port['id']) [ 542.824316] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] nova.exception.PortBindingFailed: Binding failed for port 444cedaf-2406-4024-8e04-f6253a64d8ea, please check neutron logs for more information. [ 542.824316] env[62096]: ERROR nova.compute.manager [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] [ 542.824653] env[62096]: DEBUG nova.compute.utils [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] Binding failed for port 444cedaf-2406-4024-8e04-f6253a64d8ea, please check neutron logs for more information. 
{{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 542.825332] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.780s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 542.826866] env[62096]: INFO nova.compute.claims [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 542.828751] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Expecting reply to msg 8056e416caf24124a16d6ad9346bedfb in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 542.831491] env[62096]: DEBUG nova.compute.manager [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] Build of instance 2e0f116a-e8d2-45b5-bb03-624a822fb4d2 was re-scheduled: Binding failed for port 444cedaf-2406-4024-8e04-f6253a64d8ea, please check neutron logs for more information. {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 542.831996] env[62096]: DEBUG nova.compute.manager [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 542.832248] env[62096]: DEBUG oslo_concurrency.lockutils [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Acquiring lock "refresh_cache-2e0f116a-e8d2-45b5-bb03-624a822fb4d2" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 542.832402] env[62096]: DEBUG oslo_concurrency.lockutils [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Acquired lock "refresh_cache-2e0f116a-e8d2-45b5-bb03-624a822fb4d2" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 542.832638] env[62096]: DEBUG nova.network.neutron [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 542.833057] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Expecting reply to msg 88f08c479e3c43b7bed244430260df61 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 
542.835505] env[62096]: DEBUG nova.network.neutron [-] [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 542.835505] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 714f6de49e34405ea681bd2bf2f495f1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 542.840516] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 88f08c479e3c43b7bed244430260df61 [ 542.845773] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 714f6de49e34405ea681bd2bf2f495f1 [ 542.884106] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8056e416caf24124a16d6ad9346bedfb [ 543.018099] env[62096]: DEBUG nova.compute.manager [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] Start spawning the instance on the hypervisor. {{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 543.042746] env[62096]: DEBUG nova.virt.hardware [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 543.042997] env[62096]: DEBUG nova.virt.hardware [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 543.043145] env[62096]: DEBUG nova.virt.hardware [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 543.043319] env[62096]: DEBUG nova.virt.hardware [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 543.043459] env[62096]: DEBUG nova.virt.hardware [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 543.043606] env[62096]: DEBUG nova.virt.hardware [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 543.043808] env[62096]: DEBUG nova.virt.hardware [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 543.043995] env[62096]: DEBUG nova.virt.hardware [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 543.044216] env[62096]: DEBUG nova.virt.hardware [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 543.044379] env[62096]: DEBUG nova.virt.hardware [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 543.044548] env[62096]: DEBUG nova.virt.hardware [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 543.045455] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b64543e8-6144-48dc-846d-708bcfe4862f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.056019] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66fa1ece-9a5a-4f82-8c4f-64236777d66d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.326014] env[62096]: DEBUG nova.network.neutron [req-545b6f98-2607-4e93-830b-2fa0c189ba16 req-0062759c-8bab-45e4-b966-f213b088c254 service nova] [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 543.332662] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Expecting reply to msg 925aadbf85df481687c400c0a3fc9371 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 543.338828] env[62096]: DEBUG nova.network.neutron [-] [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 543.339226] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 1f59ab8cef774000bf48176bf96ff045 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 543.344645] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 925aadbf85df481687c400c0a3fc9371 [ 543.349830] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1f59ab8cef774000bf48176bf96ff045 [ 543.373578] env[62096]: DEBUG nova.network.neutron [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 543.598609] env[62096]: DEBUG nova.network.neutron [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 543.599141] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Expecting reply to msg 4969013cada648ae82fa031c5bfad724 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 543.608910] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4969013cada648ae82fa031c5bfad724 [ 543.707724] env[62096]: DEBUG nova.network.neutron [req-545b6f98-2607-4e93-830b-2fa0c189ba16 req-0062759c-8bab-45e4-b966-f213b088c254 service nova] [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 543.708303] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-545b6f98-2607-4e93-830b-2fa0c189ba16 req-0062759c-8bab-45e4-b966-f213b088c254 service nova] Expecting reply to msg 0ee59abf61894b519250fa3a53d6a6e4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 543.719610] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0ee59abf61894b519250fa3a53d6a6e4 [ 543.846563] env[62096]: INFO nova.compute.manager [-] [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] Took 1.04 seconds to deallocate network for instance. 
[ 543.849123] env[62096]: DEBUG nova.compute.claims [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 543.849320] env[62096]: DEBUG oslo_concurrency.lockutils [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 544.020651] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01856442-b37b-40c6-a0fc-54831ecd9cef {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.028807] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dba88382-33bb-4e97-bc26-f72a424e8376 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.061167] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33e1dc45-deaf-4877-8949-fd210b72c765 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.068452] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-552c4688-db45-4664-923b-bc3b19ccc13f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.083118] env[62096]: DEBUG nova.compute.provider_tree [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 544.083628] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Expecting reply to msg de3f36d4d3be480d84f16107177d5854 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 544.090675] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg de3f36d4d3be480d84f16107177d5854 [ 544.101330] env[62096]: DEBUG oslo_concurrency.lockutils [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Releasing lock "refresh_cache-2e0f116a-e8d2-45b5-bb03-624a822fb4d2" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 544.101613] env[62096]: DEBUG nova.compute.manager [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 544.101798] env[62096]: DEBUG nova.compute.manager [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 544.101952] env[62096]: DEBUG nova.network.neutron [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 544.133898] env[62096]: DEBUG nova.network.neutron [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 544.133898] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Expecting reply to msg 8e4f9b36206540b9ad38e71bfe7cf97f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 544.141678] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8e4f9b36206540b9ad38e71bfe7cf97f [ 544.164116] env[62096]: ERROR nova.compute.manager [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port cc3376b5-d3cb-420a-804b-c6a03e40dccd, please check neutron logs for more information. 
[ 544.164116] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 544.164116] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 544.164116] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 544.164116] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 544.164116] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 544.164116] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 544.164116] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 544.164116] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 544.164116] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 544.164116] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 544.164116] env[62096]: ERROR nova.compute.manager raise self.value [ 544.164116] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 544.164116] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 544.164116] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 544.164116] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 544.164585] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 544.164585] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 544.164585] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port cc3376b5-d3cb-420a-804b-c6a03e40dccd, please check neutron logs for more information. 
[ 544.164585] env[62096]: ERROR nova.compute.manager [ 544.164585] env[62096]: Traceback (most recent call last): [ 544.164585] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 544.164585] env[62096]: listener.cb(fileno) [ 544.164585] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 544.164585] env[62096]: result = function(*args, **kwargs) [ 544.164585] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 544.164585] env[62096]: return func(*args, **kwargs) [ 544.164585] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 544.164585] env[62096]: raise e [ 544.164585] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 544.164585] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 544.164585] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 544.164585] env[62096]: created_port_ids = self._update_ports_for_instance( [ 544.164585] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 544.164585] env[62096]: with excutils.save_and_reraise_exception(): [ 544.164585] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 544.164585] env[62096]: self.force_reraise() [ 544.164585] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 544.164585] env[62096]: raise self.value [ 544.164585] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 544.164585] env[62096]: updated_port = self._update_port( [ 544.164585] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 544.164585] env[62096]: _ensure_no_port_binding_failure(port) [ 544.164585] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 544.164585] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 544.165296] env[62096]: nova.exception.PortBindingFailed: Binding failed for port cc3376b5-d3cb-420a-804b-c6a03e40dccd, please check neutron logs for more information. [ 544.165296] env[62096]: Removing descriptor: 19 [ 544.165296] env[62096]: ERROR nova.compute.manager [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port cc3376b5-d3cb-420a-804b-c6a03e40dccd, please check neutron logs for more information. 
[ 544.165296] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] Traceback (most recent call last): [ 544.165296] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 544.165296] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] yield resources [ 544.165296] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 544.165296] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] self.driver.spawn(context, instance, image_meta, [ 544.165296] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 544.165296] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 544.165296] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 544.165296] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] vm_ref = self.build_virtual_machine(instance, [ 544.165602] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 544.165602] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] vif_infos = vmwarevif.get_vif_info(self._session, [ 544.165602] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 544.165602] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] for vif in network_info: [ 544.165602] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 544.165602] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] return self._sync_wrapper(fn, *args, **kwargs) [ 544.165602] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 544.165602] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] self.wait() [ 544.165602] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 544.165602] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] self[:] = self._gt.wait() [ 544.165602] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 544.165602] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] return self._exit_event.wait() [ 544.165602] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 544.165925] env[62096]: ERROR 
nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] result = hub.switch() [ 544.165925] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 544.165925] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] return self.greenlet.switch() [ 544.165925] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 544.165925] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] result = function(*args, **kwargs) [ 544.165925] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 544.165925] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] return func(*args, **kwargs) [ 544.165925] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 544.165925] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] raise e [ 544.165925] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 544.165925] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] nwinfo = self.network_api.allocate_for_instance( [ 544.165925] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 544.165925] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] created_port_ids = self._update_ports_for_instance( [ 544.166234] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 544.166234] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] with excutils.save_and_reraise_exception(): [ 544.166234] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 544.166234] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] self.force_reraise() [ 544.166234] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 544.166234] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] raise self.value [ 544.166234] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 544.166234] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] updated_port = self._update_port( [ 544.166234] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 544.166234] 
env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] _ensure_no_port_binding_failure(port) [ 544.166234] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 544.166234] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] raise exception.PortBindingFailed(port_id=port['id']) [ 544.166518] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] nova.exception.PortBindingFailed: Binding failed for port cc3376b5-d3cb-420a-804b-c6a03e40dccd, please check neutron logs for more information. [ 544.166518] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] [ 544.166518] env[62096]: INFO nova.compute.manager [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] Terminating instance [ 544.167403] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Acquiring lock "refresh_cache-ebc7dc27-110e-4edc-87be-cb0ecdbe215f" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 544.167403] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Acquired lock "refresh_cache-ebc7dc27-110e-4edc-87be-cb0ecdbe215f" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 544.168037] env[62096]: DEBUG nova.network.neutron [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 544.168463] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Expecting reply to msg c542776b167247cf8eaa2de50ecd8294 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 544.175655] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c542776b167247cf8eaa2de50ecd8294 [ 544.211377] env[62096]: DEBUG oslo_concurrency.lockutils [req-545b6f98-2607-4e93-830b-2fa0c189ba16 req-0062759c-8bab-45e4-b966-f213b088c254 service nova] Releasing lock "refresh_cache-5ea9e99b-dffd-4b11-97ff-634551c00745" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 544.258619] env[62096]: ERROR nova.compute.manager [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 76965fb1-e16e-43bd-854f-ab0f78dc111e, please check neutron logs for more information. 
[ 544.258619] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 544.258619] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 544.258619] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 544.258619] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 544.258619] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 544.258619] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 544.258619] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 544.258619] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 544.258619] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 544.258619] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 544.258619] env[62096]: ERROR nova.compute.manager raise self.value [ 544.258619] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 544.258619] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 544.258619] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 544.258619] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 544.259224] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 544.259224] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 544.259224] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 76965fb1-e16e-43bd-854f-ab0f78dc111e, please check neutron logs for more information. 
[ 544.259224] env[62096]: ERROR nova.compute.manager [ 544.259224] env[62096]: Traceback (most recent call last): [ 544.259224] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 544.259224] env[62096]: listener.cb(fileno) [ 544.259224] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 544.259224] env[62096]: result = function(*args, **kwargs) [ 544.259224] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 544.259224] env[62096]: return func(*args, **kwargs) [ 544.259224] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 544.259224] env[62096]: raise e [ 544.259224] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 544.259224] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 544.259224] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 544.259224] env[62096]: created_port_ids = self._update_ports_for_instance( [ 544.259224] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 544.259224] env[62096]: with excutils.save_and_reraise_exception(): [ 544.259224] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 544.259224] env[62096]: self.force_reraise() [ 544.259224] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 544.259224] env[62096]: raise self.value [ 544.259224] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 544.259224] env[62096]: updated_port = self._update_port( [ 544.259224] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 544.259224] env[62096]: _ensure_no_port_binding_failure(port) [ 544.259224] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 544.259224] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 544.260049] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 76965fb1-e16e-43bd-854f-ab0f78dc111e, please check neutron logs for more information. [ 544.260049] env[62096]: Removing descriptor: 14 [ 544.260049] env[62096]: ERROR nova.compute.manager [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 76965fb1-e16e-43bd-854f-ab0f78dc111e, please check neutron logs for more information. 
[ 544.260049] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] Traceback (most recent call last): [ 544.260049] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 544.260049] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] yield resources [ 544.260049] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 544.260049] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] self.driver.spawn(context, instance, image_meta, [ 544.260049] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 544.260049] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 544.260049] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 544.260049] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] vm_ref = self.build_virtual_machine(instance, [ 544.260387] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 544.260387] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] vif_infos = vmwarevif.get_vif_info(self._session, [ 544.260387] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 544.260387] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] for vif in network_info: [ 544.260387] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 544.260387] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] return self._sync_wrapper(fn, *args, **kwargs) [ 544.260387] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 544.260387] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] self.wait() [ 544.260387] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 544.260387] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] self[:] = self._gt.wait() [ 544.260387] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 544.260387] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] return self._exit_event.wait() [ 544.260387] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 544.260784] env[62096]: ERROR 
nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] result = hub.switch() [ 544.260784] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 544.260784] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] return self.greenlet.switch() [ 544.260784] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 544.260784] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] result = function(*args, **kwargs) [ 544.260784] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 544.260784] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] return func(*args, **kwargs) [ 544.260784] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 544.260784] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] raise e [ 544.260784] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 544.260784] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] nwinfo = self.network_api.allocate_for_instance( [ 544.260784] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 544.260784] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] created_port_ids = self._update_ports_for_instance( [ 544.261163] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 544.261163] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] with excutils.save_and_reraise_exception(): [ 544.261163] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 544.261163] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] self.force_reraise() [ 544.261163] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 544.261163] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] raise self.value [ 544.261163] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 544.261163] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] updated_port = self._update_port( [ 544.261163] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 544.261163] 
env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] _ensure_no_port_binding_failure(port) [ 544.261163] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 544.261163] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] raise exception.PortBindingFailed(port_id=port['id']) [ 544.261502] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] nova.exception.PortBindingFailed: Binding failed for port 76965fb1-e16e-43bd-854f-ab0f78dc111e, please check neutron logs for more information. [ 544.261502] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] [ 544.261502] env[62096]: INFO nova.compute.manager [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] Terminating instance [ 544.261963] env[62096]: DEBUG oslo_concurrency.lockutils [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Acquiring lock "refresh_cache-8001fc1b-fa8c-496d-acc8-922cfd5d5cc2" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 544.262119] env[62096]: DEBUG oslo_concurrency.lockutils [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Acquired lock "refresh_cache-8001fc1b-fa8c-496d-acc8-922cfd5d5cc2" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 544.262750] env[62096]: DEBUG nova.network.neutron [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 544.262750] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Expecting reply to msg 1afc760af63c412282d8ee6312a7a0cf in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 544.269281] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1afc760af63c412282d8ee6312a7a0cf [ 544.587647] env[62096]: DEBUG nova.scheduler.client.report [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 544.589970] env[62096]: 
INFO oslo_messaging._drivers.amqpdriver [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Expecting reply to msg ffea200951aa4b0e9eec1c76fbb3b9ba in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 544.601613] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ffea200951aa4b0e9eec1c76fbb3b9ba [ 544.642186] env[62096]: DEBUG nova.network.neutron [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 544.642689] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Expecting reply to msg 0787dbfcbbd14070888c7acfc1af98f3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 544.651913] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0787dbfcbbd14070888c7acfc1af98f3 [ 544.711244] env[62096]: DEBUG nova.network.neutron [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 544.793180] env[62096]: DEBUG nova.network.neutron [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 544.875078] env[62096]: DEBUG nova.compute.manager [req-f8172ba5-a68c-49b7-b9de-f3772bb49177 req-d8bdb62e-3cf9-4d0b-98bd-602ccccd32eb service nova] [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] Received event network-changed-cc3376b5-d3cb-420a-804b-c6a03e40dccd {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 544.875314] env[62096]: DEBUG nova.compute.manager [req-f8172ba5-a68c-49b7-b9de-f3772bb49177 req-d8bdb62e-3cf9-4d0b-98bd-602ccccd32eb service nova] [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] Refreshing instance network info cache due to event network-changed-cc3376b5-d3cb-420a-804b-c6a03e40dccd. 
{{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 544.875421] env[62096]: DEBUG oslo_concurrency.lockutils [req-f8172ba5-a68c-49b7-b9de-f3772bb49177 req-d8bdb62e-3cf9-4d0b-98bd-602ccccd32eb service nova] Acquiring lock "refresh_cache-ebc7dc27-110e-4edc-87be-cb0ecdbe215f" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 544.931919] env[62096]: DEBUG nova.network.neutron [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 544.932623] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Expecting reply to msg 60352a30b82642489163fa8ec7264b8e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 544.940355] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 60352a30b82642489163fa8ec7264b8e [ 545.102812] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.268s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 545.102812] env[62096]: DEBUG nova.compute.manager [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 545.102812] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Expecting reply to msg b82d759011d8444e8ed29981877d9338 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 545.102812] env[62096]: DEBUG oslo_concurrency.lockutils [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 9.628s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 545.102812] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Expecting reply to msg 6a0e66a958bf478b836e5783838d2b42 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 545.134883] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b82d759011d8444e8ed29981877d9338 [ 545.136018] env[62096]: DEBUG nova.network.neutron [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 545.136567] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Expecting reply to msg be87e92f8f1140aa9c132e7cab0bb110 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 545.146749] env[62096]: INFO nova.compute.manager [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] Took 1.04 seconds to deallocate network for instance. 
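The tracebacks repeated above all follow the same PortBindingFailed path: Neutron hands back port 76965fb1-e16e-43bd-854f-ab0f78dc111e with a failed binding, nova/network/neutron.py's _update_port calls _ensure_no_port_binding_failure(port) (line 294 in the traceback), the resulting nova.exception.PortBindingFailed aborts _allocate_network_async, and the compute manager then terminates the instance and deallocates its network, as the surrounding entries show. A minimal standalone sketch of that check, assuming the Neutron 'binding:vif_type' / 'binding_failed' convention (those field values are an assumption; only the function and exception names appear in this log):

    # Sketch only; approximates the _ensure_no_port_binding_failure check
    # named in the traceback above. The 'binding:vif_type' key and the
    # 'binding_failed' sentinel are assumptions from the Neutron
    # port-binding extension, not values taken from this log.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(f"Binding failed for port {port_id}, "
                             "please check neutron logs for more information.")

    def _ensure_no_port_binding_failure(port):
        # A port whose binding could not be completed carries a failed
        # vif_type; raising here aborts the build instead of spawning a VM
        # with an unusable NIC.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    # The port from the traceback above would trip the check like this:
    try:
        _ensure_no_port_binding_failure(
            {'id': '76965fb1-e16e-43bd-854f-ab0f78dc111e',
             'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)

When this exception propagates out of the build, the instance is re-scheduled rather than failed outright, which is what the later "was re-scheduled: Binding failed for port ..." entry for instance 84c97672-0027-43af-800d-a11c243e8825 records.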
[ 545.148991] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Expecting reply to msg d6c67a77ca2447e0885472b66b0fdfe6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 545.149693] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg be87e92f8f1140aa9c132e7cab0bb110 [ 545.174820] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6a0e66a958bf478b836e5783838d2b42 [ 545.203902] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d6c67a77ca2447e0885472b66b0fdfe6 [ 545.435864] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Releasing lock "refresh_cache-ebc7dc27-110e-4edc-87be-cb0ecdbe215f" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 545.436302] env[62096]: DEBUG nova.compute.manager [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 545.436493] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 545.437153] env[62096]: DEBUG oslo_concurrency.lockutils [req-f8172ba5-a68c-49b7-b9de-f3772bb49177 req-d8bdb62e-3cf9-4d0b-98bd-602ccccd32eb service nova] Acquired lock "refresh_cache-ebc7dc27-110e-4edc-87be-cb0ecdbe215f" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 545.437321] env[62096]: DEBUG nova.network.neutron [req-f8172ba5-a68c-49b7-b9de-f3772bb49177 req-d8bdb62e-3cf9-4d0b-98bd-602ccccd32eb service nova] [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] Refreshing network info cache for port cc3376b5-d3cb-420a-804b-c6a03e40dccd {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 545.437749] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-f8172ba5-a68c-49b7-b9de-f3772bb49177 req-d8bdb62e-3cf9-4d0b-98bd-602ccccd32eb service nova] Expecting reply to msg 9d15ff02f5b6468d8e5f57ff9454b607 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 545.438527] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ee3de205-44f1-4340-9d85-067ebef1dee0 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.445266] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9d15ff02f5b6468d8e5f57ff9454b607 [ 545.448248] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b3506ea-8a96-4868-a5fd-faaafb0ba368 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.470928] env[62096]: WARNING nova.virt.vmwareapi.vmops [None 
req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ebc7dc27-110e-4edc-87be-cb0ecdbe215f could not be found. [ 545.471146] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 545.471318] env[62096]: INFO nova.compute.manager [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] Took 0.03 seconds to destroy the instance on the hypervisor. [ 545.471591] env[62096]: DEBUG oslo.service.loopingcall [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 545.471764] env[62096]: DEBUG nova.compute.manager [-] [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 545.471858] env[62096]: DEBUG nova.network.neutron [-] [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 545.569938] env[62096]: DEBUG nova.network.neutron [-] [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 545.569938] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c9918c808f3141b3a257094442823eec in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 545.581220] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c9918c808f3141b3a257094442823eec [ 545.605360] env[62096]: DEBUG nova.compute.utils [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 545.606411] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Expecting reply to msg 712bdb091beb4f8c8443c3d1f7a4c6a2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 545.614126] env[62096]: DEBUG nova.compute.manager [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 545.614310] env[62096]: DEBUG nova.network.neutron [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 545.623082] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 712bdb091beb4f8c8443c3d1f7a4c6a2 [ 545.641610] env[62096]: DEBUG oslo_concurrency.lockutils [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Releasing lock "refresh_cache-8001fc1b-fa8c-496d-acc8-922cfd5d5cc2" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 545.641610] env[62096]: DEBUG nova.compute.manager [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 545.641610] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 545.641610] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-824e61a5-29dd-4553-b849-17d1395913f8 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.654744] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4d60fbb-58d6-4753-b0af-8425e1322775 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.674179] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Expecting reply to msg c42ece064e3f450080e99c3792a41753 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 545.701768] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2 could not be found. 
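The WARNING just above shows the teardown side of the same failure. The vmwareapi driver looks the instance up via SearchIndex.FindAllByUuid, gets nothing back, logs "Instance does not exist on backend: nova.exception.InstanceNotFound", and the destroy step is treated as a no-op so that network deallocation and the resource-claim abort still run (see the "Instance destroyed" and "Deallocating network for instance" entries that follow). A rough sketch of that tolerate-missing-VM teardown pattern; the helper names below are placeholders, not Nova's real API:

    # Sketch, not Nova code: destroy must succeed even when spawn failed
    # before a backend VM ever existed. find_vm_by_uuid stands in for the
    # SearchIndex.FindAllByUuid lookup seen in the log.
    import logging

    logging.basicConfig(level=logging.DEBUG)
    LOG = logging.getLogger("teardown-sketch")

    class InstanceNotFound(Exception):
        pass

    def find_vm_by_uuid(session, uuid):
        # Placeholder: behaves as if the backend returned no match.
        raise InstanceNotFound(f"Instance {uuid} could not be found.")

    def destroy_instance(session, uuid):
        try:
            vm_ref = find_vm_by_uuid(session, uuid)
            # ... power off and unregister vm_ref here ...
        except InstanceNotFound as exc:
            # Same outcome as the WARNING above: log and keep going so the
            # caller can still deallocate the network and abort the claim.
            LOG.warning("Instance does not exist on backend: %s", exc)
        LOG.debug("Instance destroyed")

    destroy_instance(session=None,
                     uuid="8001fc1b-fa8c-496d-acc8-922cfd5d5cc2")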
[ 545.702052] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 545.702402] env[62096]: INFO nova.compute.manager [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] Took 0.06 seconds to destroy the instance on the hypervisor. [ 545.702948] env[62096]: DEBUG oslo.service.loopingcall [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 545.705970] env[62096]: DEBUG nova.compute.manager [-] [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 545.706110] env[62096]: DEBUG nova.network.neutron [-] [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 545.732009] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c42ece064e3f450080e99c3792a41753 [ 545.738025] env[62096]: DEBUG nova.network.neutron [-] [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 545.738561] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 97e71f36ad0a40f99b91828588b4c4c8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 545.745569] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 97e71f36ad0a40f99b91828588b4c4c8 [ 545.758008] env[62096]: DEBUG nova.policy [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '20ffb9c43b824b718c22f9825f9593bf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b33365e5ea894ae6bfd0c35a7835bed9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 545.869155] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7087e60d-7651-4f9d-93ff-6fc206911075 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.880163] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82bb09d0-920f-4af5-bca3-48968ce71b8c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.913865] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04d687aa-4ef8-4ed3-9593-f02160577b8d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.921262] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecf0a46d-0d2d-48f9-8439-2a0f5f0dc1e7 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.935538] env[62096]: DEBUG nova.compute.provider_tree [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 545.936062] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Expecting reply to msg df4a8145c0604d28884b0f2c3cca28f6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 545.944652] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg df4a8145c0604d28884b0f2c3cca28f6 [ 546.013208] env[62096]: DEBUG nova.network.neutron [req-f8172ba5-a68c-49b7-b9de-f3772bb49177 req-d8bdb62e-3cf9-4d0b-98bd-602ccccd32eb service nova] [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 546.072545] env[62096]: DEBUG nova.network.neutron [-] [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 546.072545] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg dcee5bb6f2484bcb9b575c0aab777d28 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 546.084992] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dcee5bb6f2484bcb9b575c0aab777d28 [ 546.117416] env[62096]: DEBUG nova.compute.manager [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] Start building block device mappings for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 546.119223] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Expecting reply to msg 03a01d5e997143b6a25ea864a02fd145 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 546.177469] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 03a01d5e997143b6a25ea864a02fd145 [ 546.182550] env[62096]: DEBUG nova.network.neutron [req-f8172ba5-a68c-49b7-b9de-f3772bb49177 req-d8bdb62e-3cf9-4d0b-98bd-602ccccd32eb service nova] [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 546.183122] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-f8172ba5-a68c-49b7-b9de-f3772bb49177 req-d8bdb62e-3cf9-4d0b-98bd-602ccccd32eb service nova] Expecting reply to msg 128db63176464dbb8110faa3b8cd61d4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 546.205571] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 128db63176464dbb8110faa3b8cd61d4 [ 546.219182] env[62096]: INFO nova.scheduler.client.report [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Deleted allocations for instance 2e0f116a-e8d2-45b5-bb03-624a822fb4d2 [ 546.225055] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Expecting reply to msg de59312ab9374e5e88715d6f72d2a3cc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 546.246811] env[62096]: DEBUG nova.network.neutron [-] [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 546.247641] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg ab78c6b7f239461d9ac0ccb7372deb48 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 546.256261] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg de59312ab9374e5e88715d6f72d2a3cc [ 546.283313] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ab78c6b7f239461d9ac0ccb7372deb48 [ 
546.439184] env[62096]: DEBUG nova.scheduler.client.report [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 546.441356] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Expecting reply to msg 7514354620b34f64a2229d334b784b42 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 546.456852] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7514354620b34f64a2229d334b784b42 [ 546.576720] env[62096]: INFO nova.compute.manager [-] [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] Took 1.10 seconds to deallocate network for instance. [ 546.582409] env[62096]: DEBUG nova.compute.claims [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 546.582409] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 546.624764] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Expecting reply to msg ed30d9f4f50746bc8b7b25c4688a5cc6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 546.680161] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ed30d9f4f50746bc8b7b25c4688a5cc6 [ 546.688984] env[62096]: DEBUG oslo_concurrency.lockutils [req-f8172ba5-a68c-49b7-b9de-f3772bb49177 req-d8bdb62e-3cf9-4d0b-98bd-602ccccd32eb service nova] Releasing lock "refresh_cache-ebc7dc27-110e-4edc-87be-cb0ecdbe215f" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 546.736435] env[62096]: DEBUG oslo_concurrency.lockutils [None req-10ce455e-fd8c-4015-ac8b-e4c26a34f016 tempest-ServersTestFqdnHostnames-875224743 tempest-ServersTestFqdnHostnames-875224743-project-member] Lock "2e0f116a-e8d2-45b5-bb03-624a822fb4d2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.956s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 546.736664] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Lock 
"2e0f116a-e8d2-45b5-bb03-624a822fb4d2" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 20.071s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 546.736849] env[62096]: INFO nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] [instance: 2e0f116a-e8d2-45b5-bb03-624a822fb4d2] During sync_power_state the instance has a pending task (block_device_mapping). Skip. [ 546.737051] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Lock "2e0f116a-e8d2-45b5-bb03-624a822fb4d2" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 546.773108] env[62096]: INFO nova.compute.manager [-] [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] Took 1.05 seconds to deallocate network for instance. [ 546.773108] env[62096]: DEBUG nova.compute.claims [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 546.773108] env[62096]: DEBUG oslo_concurrency.lockutils [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 546.773108] env[62096]: DEBUG oslo_concurrency.lockutils [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Acquiring lock "94fbc312-96fb-4b62-adc7-18053d509eca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 546.773108] env[62096]: DEBUG oslo_concurrency.lockutils [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Lock "94fbc312-96fb-4b62-adc7-18053d509eca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 546.773959] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Expecting reply to msg 731712dd43194f73a2ee65d97164b6e4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 546.780717] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 731712dd43194f73a2ee65d97164b6e4 [ 546.873916] env[62096]: DEBUG nova.network.neutron [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] Successfully created port: 
04bdae88-f432-4b2b-bd3c-e8b1341b7a76 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 546.888414] env[62096]: DEBUG nova.compute.manager [req-1dd93ab9-f9f8-45f3-a8a8-30789cc0a002 req-d44a0afc-3884-47da-b24a-21540bb94fa4 service nova] [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] Received event network-vif-deleted-61b8e775-6352-4a95-8d43-acb5384ff45e {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 546.888688] env[62096]: DEBUG nova.compute.manager [req-1dd93ab9-f9f8-45f3-a8a8-30789cc0a002 req-d44a0afc-3884-47da-b24a-21540bb94fa4 service nova] [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] Received event network-changed-76965fb1-e16e-43bd-854f-ab0f78dc111e {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 546.888772] env[62096]: DEBUG nova.compute.manager [req-1dd93ab9-f9f8-45f3-a8a8-30789cc0a002 req-d44a0afc-3884-47da-b24a-21540bb94fa4 service nova] [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] Refreshing instance network info cache due to event network-changed-76965fb1-e16e-43bd-854f-ab0f78dc111e. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 546.888971] env[62096]: DEBUG oslo_concurrency.lockutils [req-1dd93ab9-f9f8-45f3-a8a8-30789cc0a002 req-d44a0afc-3884-47da-b24a-21540bb94fa4 service nova] Acquiring lock "refresh_cache-8001fc1b-fa8c-496d-acc8-922cfd5d5cc2" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 546.889102] env[62096]: DEBUG oslo_concurrency.lockutils [req-1dd93ab9-f9f8-45f3-a8a8-30789cc0a002 req-d44a0afc-3884-47da-b24a-21540bb94fa4 service nova] Acquired lock "refresh_cache-8001fc1b-fa8c-496d-acc8-922cfd5d5cc2" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 546.889249] env[62096]: DEBUG nova.network.neutron [req-1dd93ab9-f9f8-45f3-a8a8-30789cc0a002 req-d44a0afc-3884-47da-b24a-21540bb94fa4 service nova] [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] Refreshing network info cache for port 76965fb1-e16e-43bd-854f-ab0f78dc111e {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 546.889658] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-1dd93ab9-f9f8-45f3-a8a8-30789cc0a002 req-d44a0afc-3884-47da-b24a-21540bb94fa4 service nova] Expecting reply to msg edddb77effcb4e5eabd4aa0b4f3a8dd7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 546.899338] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg edddb77effcb4e5eabd4aa0b4f3a8dd7 [ 546.945058] env[62096]: DEBUG oslo_concurrency.lockutils [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.848s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 546.945779] env[62096]: ERROR nova.compute.manager [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] [instance: 84c97672-0027-43af-800d-a11c243e8825] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 61df2824-e44b-4f03-936c-841be8a9b052, please check neutron logs for more information. 
[ 546.945779] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] Traceback (most recent call last): [ 546.945779] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 546.945779] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] self.driver.spawn(context, instance, image_meta, [ 546.945779] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 546.945779] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] self._vmops.spawn(context, instance, image_meta, injected_files, [ 546.945779] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 546.945779] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] vm_ref = self.build_virtual_machine(instance, [ 546.945779] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 546.945779] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] vif_infos = vmwarevif.get_vif_info(self._session, [ 546.945779] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 546.946088] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] for vif in network_info: [ 546.946088] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 546.946088] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] return self._sync_wrapper(fn, *args, **kwargs) [ 546.946088] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 546.946088] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] self.wait() [ 546.946088] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 546.946088] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] self[:] = self._gt.wait() [ 546.946088] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 546.946088] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] return self._exit_event.wait() [ 546.946088] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 546.946088] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] result = hub.switch() [ 546.946088] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
546.946088] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] return self.greenlet.switch() [ 546.946403] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 546.946403] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] result = function(*args, **kwargs) [ 546.946403] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 546.946403] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] return func(*args, **kwargs) [ 546.946403] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 546.946403] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] raise e [ 546.946403] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 546.946403] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] nwinfo = self.network_api.allocate_for_instance( [ 546.946403] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 546.946403] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] created_port_ids = self._update_ports_for_instance( [ 546.946403] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 546.946403] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] with excutils.save_and_reraise_exception(): [ 546.946403] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 546.946754] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] self.force_reraise() [ 546.946754] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 546.946754] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] raise self.value [ 546.946754] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 546.946754] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] updated_port = self._update_port( [ 546.946754] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 546.946754] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] _ensure_no_port_binding_failure(port) [ 546.946754] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 546.946754] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] raise exception.PortBindingFailed(port_id=port['id']) [ 546.946754] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] nova.exception.PortBindingFailed: Binding failed for port 61df2824-e44b-4f03-936c-841be8a9b052, please check neutron logs for more information. [ 546.946754] env[62096]: ERROR nova.compute.manager [instance: 84c97672-0027-43af-800d-a11c243e8825] [ 546.947022] env[62096]: DEBUG nova.compute.utils [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] [instance: 84c97672-0027-43af-800d-a11c243e8825] Binding failed for port 61df2824-e44b-4f03-936c-841be8a9b052, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 546.947807] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 11.352s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 546.956035] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Expecting reply to msg 63d73a8fb0b640d9bf5db6338d329344 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 546.956035] env[62096]: DEBUG nova.compute.manager [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] [instance: 84c97672-0027-43af-800d-a11c243e8825] Build of instance 84c97672-0027-43af-800d-a11c243e8825 was re-scheduled: Binding failed for port 61df2824-e44b-4f03-936c-841be8a9b052, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 546.956035] env[62096]: DEBUG nova.compute.manager [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] [instance: 84c97672-0027-43af-800d-a11c243e8825] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 546.956035] env[62096]: DEBUG oslo_concurrency.lockutils [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Acquiring lock "refresh_cache-84c97672-0027-43af-800d-a11c243e8825" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 546.956035] env[62096]: DEBUG oslo_concurrency.lockutils [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Acquired lock "refresh_cache-84c97672-0027-43af-800d-a11c243e8825" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 546.956337] env[62096]: DEBUG nova.network.neutron [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] [instance: 84c97672-0027-43af-800d-a11c243e8825] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 546.956337] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Expecting reply to msg ffab776f05cb4a07950c9b8477a50aa1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 546.963107] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ffab776f05cb4a07950c9b8477a50aa1 [ 547.014771] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 63d73a8fb0b640d9bf5db6338d329344 [ 547.130494] env[62096]: DEBUG nova.compute.manager [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 547.161188] env[62096]: DEBUG nova.virt.hardware [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 547.161435] env[62096]: DEBUG nova.virt.hardware [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 547.161583] env[62096]: DEBUG nova.virt.hardware [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 547.161777] env[62096]: DEBUG nova.virt.hardware [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 547.161995] env[62096]: DEBUG nova.virt.hardware [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 547.162245] env[62096]: DEBUG nova.virt.hardware [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 547.162366] env[62096]: DEBUG nova.virt.hardware [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 547.162518] env[62096]: DEBUG nova.virt.hardware [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 547.162673] env[62096]: DEBUG nova.virt.hardware [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 547.162835] env[62096]: DEBUG nova.virt.hardware [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 547.163000] env[62096]: DEBUG nova.virt.hardware [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 547.163894] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2215945-8843-45dc-a3cf-f1c79593ce1c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.172158] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4904ef6-dad1-4f55-9027-d5be45d1271a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.271587] env[62096]: DEBUG nova.compute.manager [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] Starting instance... 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 547.273433] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Expecting reply to msg 109abf19ffb34986b0bfa413513229ed in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 547.319253] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 109abf19ffb34986b0bfa413513229ed [ 547.405643] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Acquiring lock "fcdeb1ed-7f21-4338-8964-63d16d275bc2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 547.405942] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Lock "fcdeb1ed-7f21-4338-8964-63d16d275bc2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 547.430378] env[62096]: DEBUG nova.network.neutron [req-1dd93ab9-f9f8-45f3-a8a8-30789cc0a002 req-d44a0afc-3884-47da-b24a-21540bb94fa4 service nova] [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 547.507165] env[62096]: DEBUG nova.network.neutron [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] [instance: 84c97672-0027-43af-800d-a11c243e8825] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 547.545306] env[62096]: DEBUG nova.network.neutron [req-1dd93ab9-f9f8-45f3-a8a8-30789cc0a002 req-d44a0afc-3884-47da-b24a-21540bb94fa4 service nova] [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 547.545828] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-1dd93ab9-f9f8-45f3-a8a8-30789cc0a002 req-d44a0afc-3884-47da-b24a-21540bb94fa4 service nova] Expecting reply to msg 0c43c7b0270040b1a6ff9c07643e23e0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 547.559420] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c43c7b0270040b1a6ff9c07643e23e0 [ 547.656479] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a65592fa-c66f-4308-ad73-d26432125aee {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.667090] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-758df849-04b2-4240-8d8f-214178e71a53 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.707333] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b998c5d-88d2-4711-a864-55db89b332a5 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.715068] env[62096]: DEBUG nova.network.neutron [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] [instance: 84c97672-0027-43af-800d-a11c243e8825] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 547.715579] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Expecting reply to msg 09b6bdfacd6641028f2bae62cdc61902 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 547.717366] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d38cb4f-bf0e-44a8-b6b9-6e4a921a70ed {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.728166] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 09b6bdfacd6641028f2bae62cdc61902 [ 547.740968] env[62096]: DEBUG nova.compute.provider_tree [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 547.741485] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Expecting reply to msg 830664c7d9df4b55a9ef0e4c103c8a23 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 547.748934] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC 
response for msg 830664c7d9df4b55a9ef0e4c103c8a23 [ 547.796467] env[62096]: DEBUG oslo_concurrency.lockutils [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 548.050972] env[62096]: DEBUG oslo_concurrency.lockutils [req-1dd93ab9-f9f8-45f3-a8a8-30789cc0a002 req-d44a0afc-3884-47da-b24a-21540bb94fa4 service nova] Releasing lock "refresh_cache-8001fc1b-fa8c-496d-acc8-922cfd5d5cc2" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 548.050972] env[62096]: DEBUG nova.compute.manager [req-1dd93ab9-f9f8-45f3-a8a8-30789cc0a002 req-d44a0afc-3884-47da-b24a-21540bb94fa4 service nova] [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] Received event network-vif-deleted-76965fb1-e16e-43bd-854f-ab0f78dc111e {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 548.229446] env[62096]: DEBUG oslo_concurrency.lockutils [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Releasing lock "refresh_cache-84c97672-0027-43af-800d-a11c243e8825" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 548.229704] env[62096]: DEBUG nova.compute.manager [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 548.229885] env[62096]: DEBUG nova.compute.manager [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] [instance: 84c97672-0027-43af-800d-a11c243e8825] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 548.230067] env[62096]: DEBUG nova.network.neutron [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] [instance: 84c97672-0027-43af-800d-a11c243e8825] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 548.243851] env[62096]: DEBUG nova.scheduler.client.report [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 548.246230] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Expecting reply to msg 3a9521ea5711411aa2b631783287e9ef in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 548.257504] env[62096]: DEBUG nova.network.neutron [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] [instance: 84c97672-0027-43af-800d-a11c243e8825] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 548.258100] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Expecting reply to msg 1feb4cf70639495db5ffe2d214b2c416 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 548.259121] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3a9521ea5711411aa2b631783287e9ef [ 548.264976] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1feb4cf70639495db5ffe2d214b2c416 [ 548.336370] env[62096]: DEBUG nova.compute.manager [req-6d7f0503-5e22-4cbd-86df-19dba6950cb3 req-0b3c3975-e90b-4335-93c1-f59375ebf42c service nova] [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] Received event network-vif-deleted-cc3376b5-d3cb-420a-804b-c6a03e40dccd {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 548.756128] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.802s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 548.756128] env[62096]: ERROR nova.compute.manager [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 00d94847-7f83-4bc0-ba71-e7943d2957af, please check neutron logs for more information. 
[ 548.756128] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] Traceback (most recent call last): [ 548.756128] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 548.756128] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] self.driver.spawn(context, instance, image_meta, [ 548.756128] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 548.756128] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] self._vmops.spawn(context, instance, image_meta, injected_files, [ 548.756128] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 548.756128] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] vm_ref = self.build_virtual_machine(instance, [ 548.756600] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 548.756600] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] vif_infos = vmwarevif.get_vif_info(self._session, [ 548.756600] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 548.756600] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] for vif in network_info: [ 548.756600] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 548.756600] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] return self._sync_wrapper(fn, *args, **kwargs) [ 548.756600] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 548.756600] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] self.wait() [ 548.756600] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 548.756600] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] self[:] = self._gt.wait() [ 548.756600] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 548.756600] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] return self._exit_event.wait() [ 548.756600] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 548.756950] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] result = hub.switch() [ 548.756950] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
548.756950] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] return self.greenlet.switch() [ 548.756950] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 548.756950] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] result = function(*args, **kwargs) [ 548.756950] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 548.756950] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] return func(*args, **kwargs) [ 548.756950] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 548.756950] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] raise e [ 548.756950] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 548.756950] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] nwinfo = self.network_api.allocate_for_instance( [ 548.756950] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 548.756950] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] created_port_ids = self._update_ports_for_instance( [ 548.757287] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 548.757287] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] with excutils.save_and_reraise_exception(): [ 548.757287] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 548.757287] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] self.force_reraise() [ 548.757287] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 548.757287] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] raise self.value [ 548.757287] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 548.757287] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] updated_port = self._update_port( [ 548.757287] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 548.757287] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] _ensure_no_port_binding_failure(port) [ 548.757287] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 548.757287] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] raise exception.PortBindingFailed(port_id=port['id']) [ 548.757601] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] nova.exception.PortBindingFailed: Binding failed for port 00d94847-7f83-4bc0-ba71-e7943d2957af, please check neutron logs for more information. [ 548.757601] env[62096]: ERROR nova.compute.manager [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] [ 548.757601] env[62096]: DEBUG nova.compute.utils [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] Binding failed for port 00d94847-7f83-4bc0-ba71-e7943d2957af, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 548.757601] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 12.109s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 548.757601] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 548.757601] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62096) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 548.757770] env[62096]: DEBUG oslo_concurrency.lockutils [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 10.241s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 548.757770] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Expecting reply to msg 671a043fda56424e915cc3979c521d08 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 548.757770] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e923b79-d191-46e9-99b8-e1288d15caee {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.758929] env[62096]: DEBUG nova.compute.manager [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] Build of instance 018ae11e-bc51-4e0b-8f2e-c464fca6f375 was re-scheduled: Binding failed for port 00d94847-7f83-4bc0-ba71-e7943d2957af, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 548.759363] env[62096]: DEBUG nova.compute.manager [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 548.759584] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Acquiring lock "refresh_cache-018ae11e-bc51-4e0b-8f2e-c464fca6f375" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 548.759729] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Acquired lock "refresh_cache-018ae11e-bc51-4e0b-8f2e-c464fca6f375" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 548.759885] env[62096]: DEBUG nova.network.neutron [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 548.760317] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Expecting reply to msg 50c117abfcd44d8884fa5fb3a5f56f5a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 548.761315] env[62096]: DEBUG nova.network.neutron [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] [instance: 84c97672-0027-43af-800d-a11c243e8825] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 548.761634] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Expecting reply to msg d84dbcc418834f53a126eeefb88fe9d3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 548.768375] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3817e77-c7d1-4c1f-93e6-0fc57449e8f9 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.772783] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 50c117abfcd44d8884fa5fb3a5f56f5a [ 548.782586] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d84dbcc418834f53a126eeefb88fe9d3 [ 548.783602] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1ee43bc-7c99-409d-afcb-763d845a1c67 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.791260] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-77f208b6-fa44-47c4-b8e4-0275debf1c4f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.825183] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 671a043fda56424e915cc3979c521d08 [ 548.825841] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181789MB free_disk=126GB free_vcpus=48 pci_devices=None {{(pid=62096) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 548.825937] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 549.270784] env[62096]: INFO nova.compute.manager [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] [instance: 84c97672-0027-43af-800d-a11c243e8825] Took 1.04 seconds to deallocate network for instance. [ 549.272605] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Expecting reply to msg 8b61b4e812de4fa89ecb965efff84306 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 549.325203] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8b61b4e812de4fa89ecb965efff84306 [ 549.401830] env[62096]: DEBUG nova.network.neutron [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 549.487301] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-463bef6c-67da-49b8-a062-0ac3ea8dcd41 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.495651] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0df38513-c8ab-40ae-bb2d-06c48ad0dd64 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.527706] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-580f7cb1-a515-4c14-920f-a5cb4b6e5916 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.531339] env[62096]: DEBUG nova.network.neutron [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 549.532111] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Expecting reply to msg 3c61ccf32bcb481ba5ae86941b73f64e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 549.539736] env[62096]: ERROR nova.compute.manager [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 04bdae88-f432-4b2b-bd3c-e8b1341b7a76, please check neutron logs for more information. 
[ 549.539736] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 549.539736] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 549.539736] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 549.539736] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 549.539736] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 549.539736] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 549.539736] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 549.539736] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 549.539736] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 549.539736] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 549.539736] env[62096]: ERROR nova.compute.manager raise self.value [ 549.539736] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 549.539736] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 549.539736] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 549.539736] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 549.540447] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 549.540447] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 549.540447] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 04bdae88-f432-4b2b-bd3c-e8b1341b7a76, please check neutron logs for more information. 
[ 549.540447] env[62096]: ERROR nova.compute.manager [ 549.540447] env[62096]: Traceback (most recent call last): [ 549.540447] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 549.540447] env[62096]: listener.cb(fileno) [ 549.540447] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 549.540447] env[62096]: result = function(*args, **kwargs) [ 549.540447] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 549.540447] env[62096]: return func(*args, **kwargs) [ 549.540447] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 549.540447] env[62096]: raise e [ 549.540447] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 549.540447] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 549.540447] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 549.540447] env[62096]: created_port_ids = self._update_ports_for_instance( [ 549.540447] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 549.540447] env[62096]: with excutils.save_and_reraise_exception(): [ 549.540447] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 549.540447] env[62096]: self.force_reraise() [ 549.540447] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 549.540447] env[62096]: raise self.value [ 549.540447] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 549.540447] env[62096]: updated_port = self._update_port( [ 549.540447] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 549.540447] env[62096]: _ensure_no_port_binding_failure(port) [ 549.540447] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 549.540447] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 549.541154] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 04bdae88-f432-4b2b-bd3c-e8b1341b7a76, please check neutron logs for more information. [ 549.541154] env[62096]: Removing descriptor: 19 [ 549.541154] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d0eaf57-d97a-470b-ac6b-d66f09ec30c4 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.547133] env[62096]: ERROR nova.compute.manager [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 04bdae88-f432-4b2b-bd3c-e8b1341b7a76, please check neutron logs for more information. 
[ 549.547133] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] Traceback (most recent call last): [ 549.547133] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 549.547133] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] yield resources [ 549.547133] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 549.547133] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] self.driver.spawn(context, instance, image_meta, [ 549.547133] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 549.547133] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] self._vmops.spawn(context, instance, image_meta, injected_files, [ 549.547133] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 549.547133] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] vm_ref = self.build_virtual_machine(instance, [ 549.547133] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 549.547528] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] vif_infos = vmwarevif.get_vif_info(self._session, [ 549.547528] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 549.547528] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] for vif in network_info: [ 549.547528] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 549.547528] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] return self._sync_wrapper(fn, *args, **kwargs) [ 549.547528] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 549.547528] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] self.wait() [ 549.547528] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 549.547528] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] self[:] = self._gt.wait() [ 549.547528] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 549.547528] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] return self._exit_event.wait() [ 549.547528] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 549.547528] env[62096]: ERROR 
nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] result = hub.switch() [ 549.547881] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 549.547881] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] return self.greenlet.switch() [ 549.547881] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 549.547881] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] result = function(*args, **kwargs) [ 549.547881] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 549.547881] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] return func(*args, **kwargs) [ 549.547881] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 549.547881] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] raise e [ 549.547881] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 549.547881] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] nwinfo = self.network_api.allocate_for_instance( [ 549.547881] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 549.547881] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] created_port_ids = self._update_ports_for_instance( [ 549.547881] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 549.548313] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] with excutils.save_and_reraise_exception(): [ 549.548313] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 549.548313] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] self.force_reraise() [ 549.548313] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 549.548313] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] raise self.value [ 549.548313] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 549.548313] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] updated_port = self._update_port( [ 549.548313] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 549.548313] 
env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] _ensure_no_port_binding_failure(port) [ 549.548313] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 549.548313] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] raise exception.PortBindingFailed(port_id=port['id']) [ 549.548313] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] nova.exception.PortBindingFailed: Binding failed for port 04bdae88-f432-4b2b-bd3c-e8b1341b7a76, please check neutron logs for more information. [ 549.548313] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] [ 549.548656] env[62096]: INFO nova.compute.manager [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] Terminating instance [ 549.549785] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c61ccf32bcb481ba5ae86941b73f64e [ 549.550729] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Acquiring lock "refresh_cache-9ebe9a03-3fc2-4a77-bb2b-d220e9693115" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 549.551154] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Acquired lock "refresh_cache-9ebe9a03-3fc2-4a77-bb2b-d220e9693115" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 549.551625] env[62096]: DEBUG nova.network.neutron [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 549.552452] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Expecting reply to msg 64cdf3fa7bc8425f9d98d147caf8bc67 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 549.571251] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 64cdf3fa7bc8425f9d98d147caf8bc67 [ 549.572892] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Acquiring lock "1377dbd7-9025-4683-be56-6e8987a5d72e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 549.573175] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 
tempest-ListImageFiltersTestJSON-1308210824-project-member] Lock "1377dbd7-9025-4683-be56-6e8987a5d72e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 549.573637] env[62096]: DEBUG nova.compute.provider_tree [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 549.574213] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Expecting reply to msg d499c64658af4bf2a710834141726d41 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 549.581306] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d499c64658af4bf2a710834141726d41 [ 549.776804] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Expecting reply to msg 8880dd40d8284c349cb05340d0286797 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 549.840217] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8880dd40d8284c349cb05340d0286797 [ 550.037561] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Releasing lock "refresh_cache-018ae11e-bc51-4e0b-8f2e-c464fca6f375" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 550.037561] env[62096]: DEBUG nova.compute.manager [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 550.037561] env[62096]: DEBUG nova.compute.manager [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 550.037561] env[62096]: DEBUG nova.network.neutron [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 550.102819] env[62096]: DEBUG nova.network.neutron [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 550.103431] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Expecting reply to msg 53e7015c45524c67949099c56047df8e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 550.109227] env[62096]: DEBUG nova.network.neutron [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 550.112136] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 53e7015c45524c67949099c56047df8e [ 550.142156] env[62096]: ERROR nova.scheduler.client.report [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] [req-acd6a0d9-0002-4695-b9e9-2b4afde3bd69] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 6eefe13c-ab55-4c03-987f-47a62756c3b3. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-acd6a0d9-0002-4695-b9e9-2b4afde3bd69"}]}: nova.exception.PortBindingFailed: Binding failed for port aff25b36-e8c9-4587-8762-0c53e123f24e, please check neutron logs for more information. 
[ 550.173425] env[62096]: DEBUG nova.scheduler.client.report [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Refreshing inventories for resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 550.187459] env[62096]: DEBUG nova.scheduler.client.report [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Updating ProviderTree inventory for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 550.187679] env[62096]: DEBUG nova.compute.provider_tree [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 550.198099] env[62096]: DEBUG nova.scheduler.client.report [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Refreshing aggregate associations for resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3, aggregates: None {{(pid=62096) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 550.216955] env[62096]: DEBUG nova.scheduler.client.report [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Refreshing trait associations for resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62096) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 550.234526] env[62096]: DEBUG nova.network.neutron [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 550.235039] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Expecting reply 
to msg 521f8bb9189246bf86dbd42236332679 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 550.245751] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 521f8bb9189246bf86dbd42236332679 [ 550.309128] env[62096]: INFO nova.scheduler.client.report [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Deleted allocations for instance 84c97672-0027-43af-800d-a11c243e8825 [ 550.317517] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Expecting reply to msg 92973b4b8b3446ce96e2c5455086845a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 550.334734] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 92973b4b8b3446ce96e2c5455086845a [ 550.470288] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da78104c-56bd-4554-bc61-42a4477db33f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.475233] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eab39200-6671-4d8e-b956-aca3ef9e6879 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.508304] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a8bcd2e-7219-46cb-807a-d7df6acbd507 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.517079] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74410ef0-fda8-4032-a29b-33e683baaedb {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.534777] env[62096]: DEBUG nova.compute.provider_tree [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 550.535560] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Expecting reply to msg b966bf332af1440fa9b64a027311237f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 550.556048] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b966bf332af1440fa9b64a027311237f [ 550.605744] env[62096]: DEBUG nova.network.neutron [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] Updating instance_info_cache with network_info: 
[] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 550.606306] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Expecting reply to msg afc40bf9a5734ef2aa45c695c28b195f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 550.618190] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg afc40bf9a5734ef2aa45c695c28b195f [ 550.622531] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Acquiring lock "09410bd9-aa4e-49a8-86fb-8058b842bd72" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 550.622746] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Lock "09410bd9-aa4e-49a8-86fb-8058b842bd72" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 550.663723] env[62096]: DEBUG oslo_concurrency.lockutils [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Acquiring lock "4fca4cf6-1c0e-487e-bf26-fc441d143128" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 550.663961] env[62096]: DEBUG oslo_concurrency.lockutils [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Lock "4fca4cf6-1c0e-487e-bf26-fc441d143128" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 550.740359] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Releasing lock "refresh_cache-9ebe9a03-3fc2-4a77-bb2b-d220e9693115" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 550.740770] env[62096]: DEBUG nova.compute.manager [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 550.740956] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 550.741284] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ccce7d6f-70be-4442-93e2-4cc235e880b6 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.750539] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e3b3962-3f36-4cbc-ad27-9ac90ec85921 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.773423] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9ebe9a03-3fc2-4a77-bb2b-d220e9693115 could not be found. [ 550.773640] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 550.773820] env[62096]: INFO nova.compute.manager [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] Took 0.03 seconds to destroy the instance on the hypervisor. [ 550.774121] env[62096]: DEBUG oslo.service.loopingcall [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 550.774337] env[62096]: DEBUG nova.compute.manager [-] [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 550.774425] env[62096]: DEBUG nova.network.neutron [-] [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 550.797037] env[62096]: DEBUG nova.network.neutron [-] [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 550.797594] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 7ae26bb533234a6aa9ae518b1f7b78c6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 550.804935] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ae26bb533234a6aa9ae518b1f7b78c6 [ 550.821778] env[62096]: DEBUG oslo_concurrency.lockutils [None req-212fcd2e-ae1a-4ce1-9ad3-08ffa272db5b tempest-AttachInterfacesV270Test-1431958456 tempest-AttachInterfacesV270Test-1431958456-project-member] Lock "84c97672-0027-43af-800d-a11c243e8825" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.282s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 550.823150] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Expecting reply to msg 5481f0a93e7d40daa17cadd562036b13 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 550.838771] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5481f0a93e7d40daa17cadd562036b13 [ 551.059457] env[62096]: ERROR nova.scheduler.client.report [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] [req-4dcbe017-1acb-4891-8127-9758212e64a6] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 6eefe13c-ab55-4c03-987f-47a62756c3b3. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-4dcbe017-1acb-4891-8127-9758212e64a6"}]}: nova.exception.PortBindingFailed: Binding failed for port aff25b36-e8c9-4587-8762-0c53e123f24e, please check neutron logs for more information. 
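The 409 above carries code placement.concurrent_update: the report client's cached resource provider generation was stale, so Placement rejected the inventory PUT, and the client re-reads the provider before retrying (the "Refreshing inventories" entries that follow). Below is a minimal, illustrative sketch of that read-then-retry loop against the Placement HTTP API, not Nova's actual report client; PLACEMENT_URL and HEADERS are assumptions, and a real caller would go through a keystoneauth session with a token.

# Hedged sketch only: retry an inventory PUT when Placement answers
# 409 placement.concurrent_update, by re-reading the provider generation first.
import requests

PLACEMENT_URL = "http://placement.example:8778"            # assumed endpoint
HEADERS = {"OpenStack-API-Version": "placement 1.26"}       # plus an auth token in practice

def set_inventory(rp_uuid, inventories, retries=3):
    for _ in range(retries):
        # Fetch the current generation; a stale value is what produced the 409 in the log.
        cur = requests.get(
            f"{PLACEMENT_URL}/resource_providers/{rp_uuid}/inventories",
            headers=HEADERS).json()
        body = {
            "resource_provider_generation": cur["resource_provider_generation"],
            "inventories": inventories,
        }
        resp = requests.put(
            f"{PLACEMENT_URL}/resource_providers/{rp_uuid}/inventories",
            json=body, headers=HEADERS)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # 409: another writer bumped the generation; loop, refresh, and try again.
    raise RuntimeError(f"generation conflict persisted for provider {rp_uuid}")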
[ 551.074875] env[62096]: DEBUG nova.scheduler.client.report [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Refreshing inventories for resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 551.090988] env[62096]: DEBUG nova.scheduler.client.report [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Updating ProviderTree inventory for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 551.091267] env[62096]: DEBUG nova.compute.provider_tree [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 551.106751] env[62096]: DEBUG nova.scheduler.client.report [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Refreshing aggregate associations for resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3, aggregates: None {{(pid=62096) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 551.108912] env[62096]: INFO nova.compute.manager [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] [instance: 018ae11e-bc51-4e0b-8f2e-c464fca6f375] Took 1.07 seconds to deallocate network for instance. 
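The PortBindingFailed that terminates the 409 message above (its full traceback appears at 552.509644 below) is Nova reacting to Neutron reporting a failed binding on the port it just updated: the port comes back with binding:vif_type set to binding_failed. The following is a paraphrased, self-contained sketch of that check, with names simplified; it is not a verbatim copy of nova/network/neutron.py.

# Hedged sketch of the binding check behind the PortBindingFailed entries in this log.
class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")

def ensure_no_port_binding_failure(port: dict) -> None:
    # 'port' is the dict Neutron returns from a port show/update call.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])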
[ 551.110662] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Expecting reply to msg f032634904b94f38b30f1128eb802bf6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 551.132908] env[62096]: DEBUG nova.scheduler.client.report [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Refreshing trait associations for resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62096) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 551.174431] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f032634904b94f38b30f1128eb802bf6 [ 551.305260] env[62096]: DEBUG nova.network.neutron [-] [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 551.305746] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e5154ebaed4542a3aac6666b9797e286 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 551.315096] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e5154ebaed4542a3aac6666b9797e286 [ 551.327202] env[62096]: DEBUG nova.compute.manager [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] Starting instance... 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 551.327202] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Expecting reply to msg 366d589d56e347c2a88019aa6296a431 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 551.364028] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2f2d123-00be-4b7f-afff-abd9e4d96918 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.369113] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 366d589d56e347c2a88019aa6296a431 [ 551.376032] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c7201a0-2625-4a4c-9b6a-7a6c72872df0 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.420127] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6730033-c7b3-491f-8226-fe3b73ddb63f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.428857] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cef41c9c-d8b4-49d6-b88b-8d482678432c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.443703] env[62096]: DEBUG nova.compute.provider_tree [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 551.444284] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Expecting reply to msg 77b6d4d48cb545b3bb7cc9bf193232b4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 551.454535] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 77b6d4d48cb545b3bb7cc9bf193232b4 [ 551.616172] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Expecting reply to msg bc02279700994f8bbd5d338d694ca115 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 551.661686] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bc02279700994f8bbd5d338d694ca115 [ 551.810741] env[62096]: INFO nova.compute.manager [-] [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] Took 1.03 seconds to deallocate network for instance. 
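The "Acquiring lock ... / acquired ... waited Ns / released ... held Ns" lines that bracket most operations here come from oslo.concurrency's in-process named locks, keyed either on a shared name such as compute_resources or on the instance UUID. A minimal sketch of both forms follows; the lock names mirror the log and the function bodies are placeholders, not Nova code.

# Hedged sketch of the oslo.concurrency locking pattern seen in these entries.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def instance_claim(instance_uuid):
    # Runs with the in-process "compute_resources" lock held; the waited/held
    # durations logged above measure exactly this critical section.
    return 'claimed %s' % instance_uuid

def locked_do_build_and_run_instance(instance_uuid):
    # Context-manager form, keyed on the instance UUID as in the
    # _locked_do_build_and_run_instance entries.
    with lockutils.lock(instance_uuid):
        return instance_claim(instance_uuid)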
[ 551.812659] env[62096]: DEBUG nova.compute.claims [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 551.812659] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.852309] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 552.002241] env[62096]: DEBUG nova.scheduler.client.report [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Updated inventory for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with generation 20 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:957}} [ 552.002498] env[62096]: DEBUG nova.compute.provider_tree [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Updating resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 generation from 20 to 21 during operation: update_inventory {{(pid=62096) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 552.002672] env[62096]: DEBUG nova.compute.provider_tree [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 552.005735] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Expecting reply to msg 9994140df95841638b94de2cc8afcc75 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 552.017449] env[62096]: 
INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9994140df95841638b94de2cc8afcc75 [ 552.127702] env[62096]: DEBUG nova.compute.manager [req-e9080371-d7ec-48ba-a5ba-b38495de9583 req-b8777607-ae2c-435f-b26b-b5773ded8623 service nova] [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] Received event network-changed-04bdae88-f432-4b2b-bd3c-e8b1341b7a76 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 552.128269] env[62096]: DEBUG nova.compute.manager [req-e9080371-d7ec-48ba-a5ba-b38495de9583 req-b8777607-ae2c-435f-b26b-b5773ded8623 service nova] [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] Refreshing instance network info cache due to event network-changed-04bdae88-f432-4b2b-bd3c-e8b1341b7a76. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 552.128269] env[62096]: DEBUG oslo_concurrency.lockutils [req-e9080371-d7ec-48ba-a5ba-b38495de9583 req-b8777607-ae2c-435f-b26b-b5773ded8623 service nova] Acquiring lock "refresh_cache-9ebe9a03-3fc2-4a77-bb2b-d220e9693115" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 552.128382] env[62096]: DEBUG oslo_concurrency.lockutils [req-e9080371-d7ec-48ba-a5ba-b38495de9583 req-b8777607-ae2c-435f-b26b-b5773ded8623 service nova] Acquired lock "refresh_cache-9ebe9a03-3fc2-4a77-bb2b-d220e9693115" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 552.128485] env[62096]: DEBUG nova.network.neutron [req-e9080371-d7ec-48ba-a5ba-b38495de9583 req-b8777607-ae2c-435f-b26b-b5773ded8623 service nova] [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] Refreshing network info cache for port 04bdae88-f432-4b2b-bd3c-e8b1341b7a76 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 552.128891] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-e9080371-d7ec-48ba-a5ba-b38495de9583 req-b8777607-ae2c-435f-b26b-b5773ded8623 service nova] Expecting reply to msg 916bc85260ed4b3fbd7276b10ddb1ef0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 552.136661] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 916bc85260ed4b3fbd7276b10ddb1ef0 [ 552.146216] env[62096]: INFO nova.scheduler.client.report [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Deleted allocations for instance 018ae11e-bc51-4e0b-8f2e-c464fca6f375 [ 552.152825] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Expecting reply to msg 7363ac92c81b427ab3e8042494130120 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 552.169289] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7363ac92c81b427ab3e8042494130120 [ 552.508990] env[62096]: DEBUG oslo_concurrency.lockutils [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 3.756s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 552.509644] env[62096]: ERROR nova.compute.manager [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 
tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port aff25b36-e8c9-4587-8762-0c53e123f24e, please check neutron logs for more information. [ 552.509644] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] Traceback (most recent call last): [ 552.509644] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 552.509644] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] self.driver.spawn(context, instance, image_meta, [ 552.509644] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 552.509644] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 552.509644] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 552.509644] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] vm_ref = self.build_virtual_machine(instance, [ 552.509644] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 552.509644] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] vif_infos = vmwarevif.get_vif_info(self._session, [ 552.509644] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 552.509949] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] for vif in network_info: [ 552.509949] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 552.509949] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] return self._sync_wrapper(fn, *args, **kwargs) [ 552.509949] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 552.509949] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] self.wait() [ 552.509949] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 552.509949] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] self[:] = self._gt.wait() [ 552.509949] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 552.509949] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] return self._exit_event.wait() [ 552.509949] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, 
in wait [ 552.509949] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] result = hub.switch() [ 552.509949] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 552.509949] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] return self.greenlet.switch() [ 552.510294] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 552.510294] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] result = function(*args, **kwargs) [ 552.510294] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 552.510294] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] return func(*args, **kwargs) [ 552.510294] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 552.510294] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] raise e [ 552.510294] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 552.510294] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] nwinfo = self.network_api.allocate_for_instance( [ 552.510294] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 552.510294] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] created_port_ids = self._update_ports_for_instance( [ 552.510294] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 552.510294] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] with excutils.save_and_reraise_exception(): [ 552.510294] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 552.510637] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] self.force_reraise() [ 552.510637] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 552.510637] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] raise self.value [ 552.510637] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 552.510637] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] updated_port = self._update_port( [ 552.510637] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] File "/opt/stack/nova/nova/network/neutron.py", 
line 585, in _update_port [ 552.510637] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] _ensure_no_port_binding_failure(port) [ 552.510637] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 552.510637] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] raise exception.PortBindingFailed(port_id=port['id']) [ 552.510637] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] nova.exception.PortBindingFailed: Binding failed for port aff25b36-e8c9-4587-8762-0c53e123f24e, please check neutron logs for more information. [ 552.510637] env[62096]: ERROR nova.compute.manager [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] [ 552.510902] env[62096]: DEBUG nova.compute.utils [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] Binding failed for port aff25b36-e8c9-4587-8762-0c53e123f24e, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 552.511565] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.189s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.513401] env[62096]: INFO nova.compute.claims [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 552.515130] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Expecting reply to msg 1367447d7efb42cb9f76dc863727f982 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 552.521021] env[62096]: DEBUG nova.compute.manager [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] Build of instance 89eb1f17-1d36-4b6f-8102-04708d55a81c was re-scheduled: Binding failed for port aff25b36-e8c9-4587-8762-0c53e123f24e, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 552.521021] env[62096]: DEBUG nova.compute.manager [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 552.521021] env[62096]: DEBUG oslo_concurrency.lockutils [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Acquiring lock "refresh_cache-89eb1f17-1d36-4b6f-8102-04708d55a81c" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 552.521021] env[62096]: DEBUG oslo_concurrency.lockutils [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Acquired lock "refresh_cache-89eb1f17-1d36-4b6f-8102-04708d55a81c" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 552.521373] env[62096]: DEBUG nova.network.neutron [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 552.521373] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Expecting reply to msg 568f17b1ead14944b1a25df2d7e806f3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 552.531183] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 568f17b1ead14944b1a25df2d7e806f3 [ 552.572254] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1367447d7efb42cb9f76dc863727f982 [ 552.655615] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a07a81da-9645-47fc-94cd-e8687a5b10d0 tempest-ServerActionsTestJSON-232923977 tempest-ServerActionsTestJSON-232923977-project-member] Lock "018ae11e-bc51-4e0b-8f2e-c464fca6f375" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.289s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 552.656713] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Expecting reply to msg cf0d9009925541b4b3bb7b290b666f26 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 552.674690] env[62096]: DEBUG nova.network.neutron [req-e9080371-d7ec-48ba-a5ba-b38495de9583 req-b8777607-ae2c-435f-b26b-b5773ded8623 service nova] [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 552.676187] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cf0d9009925541b4b3bb7b290b666f26 [ 552.939917] env[62096]: DEBUG nova.network.neutron [req-e9080371-d7ec-48ba-a5ba-b38495de9583 req-b8777607-ae2c-435f-b26b-b5773ded8623 service nova] [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 552.940899] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-e9080371-d7ec-48ba-a5ba-b38495de9583 req-b8777607-ae2c-435f-b26b-b5773ded8623 service nova] Expecting reply to msg 3ad962181ac144fd93624b5e495b50a4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 552.952351] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ad962181ac144fd93624b5e495b50a4 [ 553.019449] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Expecting reply to msg e72ef4df87354cc5aafebf71c3bb46b0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 553.028311] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e72ef4df87354cc5aafebf71c3bb46b0 [ 553.044816] env[62096]: DEBUG nova.network.neutron [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 553.096793] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Acquiring lock "4fdcbe73-6d7e-4f89-b83d-e6b1664bd317" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 553.097164] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Lock "4fdcbe73-6d7e-4f89-b83d-e6b1664bd317" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 553.163253] env[62096]: DEBUG nova.compute.manager [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] Starting instance... 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 553.164984] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Expecting reply to msg d5dcd0f77da94a51bf3cb902b04e92e4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 553.168792] env[62096]: DEBUG nova.network.neutron [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 553.169255] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Expecting reply to msg 593cbc3595ce42c994234936e52050b3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 553.182560] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 593cbc3595ce42c994234936e52050b3 [ 553.243018] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d5dcd0f77da94a51bf3cb902b04e92e4 [ 553.443032] env[62096]: DEBUG oslo_concurrency.lockutils [req-e9080371-d7ec-48ba-a5ba-b38495de9583 req-b8777607-ae2c-435f-b26b-b5773ded8623 service nova] Releasing lock "refresh_cache-9ebe9a03-3fc2-4a77-bb2b-d220e9693115" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 553.443032] env[62096]: DEBUG nova.compute.manager [req-e9080371-d7ec-48ba-a5ba-b38495de9583 req-b8777607-ae2c-435f-b26b-b5773ded8623 service nova] [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] Received event network-vif-deleted-04bdae88-f432-4b2b-bd3c-e8b1341b7a76 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 553.677202] env[62096]: DEBUG oslo_concurrency.lockutils [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Releasing lock "refresh_cache-89eb1f17-1d36-4b6f-8102-04708d55a81c" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 553.677474] env[62096]: DEBUG nova.compute.manager [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 553.677573] env[62096]: DEBUG nova.compute.manager [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 553.677733] env[62096]: DEBUG nova.network.neutron [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 553.701127] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 553.741494] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e694a55b-bc9e-4ce8-a495-61d44313362b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.749188] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a27572f1-f431-4307-a22d-804b3bb2160b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.784392] env[62096]: DEBUG nova.network.neutron [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 553.785191] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Expecting reply to msg 2e2dfa10f79b45b0bae9581688d6d7c6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 553.787032] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e439a794-ae33-4d58-87a3-e3e201d16f1a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.796087] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98cb091c-0e99-4bc1-ab0d-7b3bdb8da328 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.815258] env[62096]: DEBUG nova.compute.provider_tree [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 553.816542] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Expecting reply to msg 1953e003c38e433fa975b24d9b720319 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 553.818038] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2e2dfa10f79b45b0bae9581688d6d7c6 [ 553.825364] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1953e003c38e433fa975b24d9b720319 [ 554.168553] env[62096]: DEBUG oslo_concurrency.lockutils [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Acquiring lock "6ac3df26-83ab-4519-a1e2-51286c1d1991" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 554.168790] env[62096]: DEBUG oslo_concurrency.lockutils [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Lock "6ac3df26-83ab-4519-a1e2-51286c1d1991" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 554.290542] env[62096]: DEBUG nova.network.neutron [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 554.291083] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Expecting reply to msg 7897d594668e42789395ad29dcd4e2ef 
in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 554.300435] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7897d594668e42789395ad29dcd4e2ef [ 554.320112] env[62096]: DEBUG nova.scheduler.client.report [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 554.322420] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Expecting reply to msg 11f22456f99f4cdd81d3f349fa34e176 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 554.334185] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 11f22456f99f4cdd81d3f349fa34e176 [ 554.796744] env[62096]: INFO nova.compute.manager [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] [instance: 89eb1f17-1d36-4b6f-8102-04708d55a81c] Took 1.12 seconds to deallocate network for instance. [ 554.796744] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Expecting reply to msg 8c12f1a0146c4ae4a3141917a8470989 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 554.831804] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.316s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 554.831804] env[62096]: DEBUG nova.compute.manager [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 554.831804] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Expecting reply to msg 266cdc607cc64ac595028a04d2e4dea5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 554.831804] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.715s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 554.833310] env[62096]: INFO nova.compute.claims [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] [instance: 3530e93e-f729-471b-976b-e52c3182cb15] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 554.837773] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Expecting reply to msg cad93556b9b44c2d99d08fd78957a10c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 554.913403] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 266cdc607cc64ac595028a04d2e4dea5 [ 554.921414] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8c12f1a0146c4ae4a3141917a8470989 [ 554.936542] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cad93556b9b44c2d99d08fd78957a10c [ 555.307098] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Expecting reply to msg dd4e5236371f4543b87e1fa4428a6bd9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 555.338327] env[62096]: DEBUG nova.compute.utils [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 555.338964] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Expecting reply to msg 8dcf6bb571dd407d98ec7f33a74c32e6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 555.339855] env[62096]: DEBUG nova.compute.manager [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 555.340199] env[62096]: DEBUG nova.network.neutron [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 555.343264] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Expecting reply to msg 1b7efdf791f14b3a95d2da40dbf6e65c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 555.349160] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8dcf6bb571dd407d98ec7f33a74c32e6 [ 555.352314] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1b7efdf791f14b3a95d2da40dbf6e65c [ 555.364054] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dd4e5236371f4543b87e1fa4428a6bd9 [ 555.460565] env[62096]: DEBUG nova.policy [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ce19669d765a4fd69d8a1bce3c6f228a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3bcb2cd658cd4492b8ed223d177b1e1b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 555.843057] env[62096]: DEBUG nova.compute.manager [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] Start building block device mappings for instance. 
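The nova.policy DEBUG line above is a soft oslo.policy check of the network:attach_external_network rule against the request credentials; for a plain member/reader token it evaluates to False, which simply means the build proceeds without requesting an external network rather than failing. A rough sketch of the enforcement call, with an illustrative rule default (Nova registers its own):

    # Rough sketch of the oslo.policy check behind the nova.policy
    # "authorize" line above; the rule default used here is illustrative,
    # Nova registers its own defaults for network:attach_external_network.
    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    enforcer.register_default(
        policy.RuleDefault('network:attach_external_network', 'role:admin'))

    creds = {'roles': ['member', 'reader'],
             'project_id': '3bcb2cd658cd4492b8ed223d177b1e1b'}
    target = {'project_id': creds['project_id']}

    # do_raise=False mirrors the soft check in the log: a False result is
    # only logged at DEBUG and does not abort the build.
    allowed = enforcer.authorize('network:attach_external_network',
                                 target, creds, do_raise=False)
    print(allowed)  # False for a member/reader token under this rule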
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 555.845191] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Expecting reply to msg 37868bf3b1d3448889ee64d516c88057 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 555.921610] env[62096]: INFO nova.scheduler.client.report [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Deleted allocations for instance 89eb1f17-1d36-4b6f-8102-04708d55a81c [ 555.936526] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Expecting reply to msg c82ff2d217c24fddb8ca04124747e64e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 555.952813] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 37868bf3b1d3448889ee64d516c88057 [ 555.958923] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c82ff2d217c24fddb8ca04124747e64e [ 556.102415] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-913b8309-198b-4e7d-bc91-d5b51e48a4bd {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.110593] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48bdeccb-7f32-4c38-be9a-956ddcbd02a5 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.147224] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e7ac556-cdfa-46dc-89ce-ce767d616c8d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.155772] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b67cde1a-475a-4c7e-8fc0-c5ba0d537aa2 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.175514] env[62096]: DEBUG nova.compute.provider_tree [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 556.176036] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Expecting reply to msg 6d15db76163c44929311e1f91159ff2e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 556.180566] env[62096]: DEBUG oslo_concurrency.lockutils [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Acquiring lock "3855c98d-4ade-4f6f-85aa-1297df5a39a9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 556.180688] env[62096]: DEBUG oslo_concurrency.lockutils [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Lock "3855c98d-4ade-4f6f-85aa-1297df5a39a9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 556.188701] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6d15db76163c44929311e1f91159ff2e [ 556.353087] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Expecting reply to msg 624045c03adf434bb1cbbb3e635ec4e4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 556.395079] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 624045c03adf434bb1cbbb3e635ec4e4 [ 556.435208] env[62096]: DEBUG oslo_concurrency.lockutils [None req-94f33e3e-2f48-4192-a8c5-afbbc2b01234 tempest-ServerDiagnosticsNegativeTest-1941278843 tempest-ServerDiagnosticsNegativeTest-1941278843-project-member] Lock "89eb1f17-1d36-4b6f-8102-04708d55a81c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.466s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 556.435796] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Expecting reply to msg 3d55360d2f4a4b98bc3df5e2c449609b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 556.448381] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3d55360d2f4a4b98bc3df5e2c449609b [ 556.590284] env[62096]: DEBUG nova.network.neutron [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] Successfully created port: 0198948c-99f9-4d41-9309-650d6ba5c5cc {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 556.681590] env[62096]: DEBUG nova.scheduler.client.report [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 556.684059] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Expecting reply to msg b60cd89bc87e4c7cb6ec25103f86d22d in queue 
reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 556.696857] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b60cd89bc87e4c7cb6ec25103f86d22d [ 556.856994] env[62096]: DEBUG nova.compute.manager [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] Start spawning the instance on the hypervisor. {{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 556.889670] env[62096]: DEBUG nova.virt.hardware [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 556.889907] env[62096]: DEBUG nova.virt.hardware [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 556.890064] env[62096]: DEBUG nova.virt.hardware [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 556.890245] env[62096]: DEBUG nova.virt.hardware [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 556.890389] env[62096]: DEBUG nova.virt.hardware [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 556.890535] env[62096]: DEBUG nova.virt.hardware [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 556.890742] env[62096]: DEBUG nova.virt.hardware [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 556.890900] env[62096]: DEBUG nova.virt.hardware [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 556.891104] env[62096]: DEBUG nova.virt.hardware [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 556.891210] env[62096]: DEBUG nova.virt.hardware [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 556.891378] env[62096]: DEBUG nova.virt.hardware [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 556.892462] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16624429-aa19-432e-a381-5bb62a098f2f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.902461] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bebded69-e229-42f3-a405-a0ee92401d9b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.939019] env[62096]: DEBUG nova.compute.manager [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] Starting instance... 
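The nova.virt.hardware DEBUG lines above trace topology selection for the 1-vCPU m1.nano flavor: with no hw: properties on the flavor or image, the limits default to 65536 sockets/cores/threads, every factorisation of the vCPU count into sockets*cores*threads within those limits is enumerated, and for a single vCPU the only candidate (and therefore the chosen topology) is 1:1:1. A simplified enumeration, not the actual nova.virt.hardware implementation:

    # Simplified version of the enumeration the nova.virt.hardware DEBUG
    # lines describe; the real code also folds in flavor/image preferences.
    import itertools
    from collections import namedtuple

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        """All sockets*cores*threads factorisations of vcpus within limits."""
        return [VirtCPUTopology(s, c, t)
                for s, c, t in itertools.product(range(1, vcpus + 1), repeat=3)
                if s * c * t == vcpus
                and s <= max_sockets and c <= max_cores and t <= max_threads]

    print(possible_topologies(1))
    # [VirtCPUTopology(sockets=1, cores=1, threads=1)]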
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 556.941385] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Expecting reply to msg e2e4beaa78e44bc88994f5cb8a591b25 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 556.977844] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e2e4beaa78e44bc88994f5cb8a591b25 [ 557.191202] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.356s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 557.191202] env[62096]: DEBUG nova.compute.manager [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] [instance: 3530e93e-f729-471b-976b-e52c3182cb15] Start building networks asynchronously for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 557.191202] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Expecting reply to msg 0297a2d45aab452aa75bc46ed0b8892c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 557.191202] env[62096]: DEBUG oslo_concurrency.lockutils [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.341s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 557.193273] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Expecting reply to msg 63f34aa4614a4198b3d9669ea0fd2de5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 557.231028] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0297a2d45aab452aa75bc46ed0b8892c [ 557.254819] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 63f34aa4614a4198b3d9669ea0fd2de5 [ 557.469607] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 557.698186] env[62096]: DEBUG nova.compute.utils [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 557.698330] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None 
req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Expecting reply to msg 0039903461284183b16b2fbfe6a5acf6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 557.702547] env[62096]: DEBUG nova.compute.manager [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] [instance: 3530e93e-f729-471b-976b-e52c3182cb15] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 557.702652] env[62096]: DEBUG nova.network.neutron [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] [instance: 3530e93e-f729-471b-976b-e52c3182cb15] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 557.716605] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0039903461284183b16b2fbfe6a5acf6 [ 557.889226] env[62096]: DEBUG nova.policy [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b7f991cd86404315b05769d273948008', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ed0c2abf35284cd3acf5f5ee1c7ed5e7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 557.966872] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4402fba2-a9aa-40c8-a264-74e66a3c4fee {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.974746] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-060e6c34-301a-4036-8060-c30521c600f3 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.006327] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0d4faef-1f8d-4c4b-9d1d-f7dd868a5b1d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.014153] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8109bc35-1d30-4b6a-ad34-5453161e738f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.027772] env[62096]: DEBUG nova.compute.provider_tree [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 558.027959] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 
tempest-DeleteServersAdminTestJSON-1001706998-project-member] Expecting reply to msg 1c3af27f50eb49bfa5aadce06921136b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 558.041091] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1c3af27f50eb49bfa5aadce06921136b [ 558.107910] env[62096]: DEBUG oslo_concurrency.lockutils [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Acquiring lock "9bc49e7a-af22-4112-8609-348605599692" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 558.108209] env[62096]: DEBUG oslo_concurrency.lockutils [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Lock "9bc49e7a-af22-4112-8609-348605599692" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 558.123527] env[62096]: DEBUG oslo_concurrency.lockutils [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Acquiring lock "12a4b44d-b6aa-45e2-bba9-d73f41fa4b61" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 558.123749] env[62096]: DEBUG oslo_concurrency.lockutils [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Lock "12a4b44d-b6aa-45e2-bba9-d73f41fa4b61" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 558.203573] env[62096]: DEBUG nova.compute.manager [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] [instance: 3530e93e-f729-471b-976b-e52c3182cb15] Start building block device mappings for instance. 
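The Acquiring lock / acquired ... waited / "released" ... held messages that bracket instance_claim and _locked_do_build_and_run_instance come from oslo.concurrency's synchronized wrapper (the "inner" function named in each record), which serialises callers on a named lock and reports how long each caller waited and how long it held the lock. A minimal sketch of that pattern, assuming an in-process (non-external) lock and a made-up critical section:

    # Minimal sketch of the oslo.concurrency pattern that produces the
    # Acquiring/acquired/released lock records above (in-process lock,
    # made-up critical section).
    import time
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def instance_claim(instance_uuid):
        # Runs with the named lock held; the wrapper logs how long the
        # caller waited for the lock and how long it was held.
        time.sleep(0.1)
        return instance_uuid

    instance_claim('3530e93e-f729-471b-976b-e52c3182cb15')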
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 558.206427] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Expecting reply to msg ee7889f6db0a4e4f8af65c0e26983598 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 558.258570] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ee7889f6db0a4e4f8af65c0e26983598 [ 558.530570] env[62096]: DEBUG nova.scheduler.client.report [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 558.533010] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Expecting reply to msg fcd1abe26d0e4fdf8bf5dae5b2187f66 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 558.547752] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fcd1abe26d0e4fdf8bf5dae5b2187f66 [ 558.714866] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Expecting reply to msg 684764577e3b4d27946deaa99a51bd34 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 558.764069] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 684764577e3b4d27946deaa99a51bd34 [ 559.035609] env[62096]: DEBUG oslo_concurrency.lockutils [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.845s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 559.036238] env[62096]: ERROR nova.compute.manager [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 61b8e775-6352-4a95-8d43-acb5384ff45e, please check neutron logs for more information. 
[ 559.036238] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] Traceback (most recent call last): [ 559.036238] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 559.036238] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] self.driver.spawn(context, instance, image_meta, [ 559.036238] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 559.036238] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] self._vmops.spawn(context, instance, image_meta, injected_files, [ 559.036238] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 559.036238] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] vm_ref = self.build_virtual_machine(instance, [ 559.036238] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 559.036238] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] vif_infos = vmwarevif.get_vif_info(self._session, [ 559.036238] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 559.036540] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] for vif in network_info: [ 559.036540] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 559.036540] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] return self._sync_wrapper(fn, *args, **kwargs) [ 559.036540] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 559.036540] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] self.wait() [ 559.036540] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 559.036540] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] self[:] = self._gt.wait() [ 559.036540] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 559.036540] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] return self._exit_event.wait() [ 559.036540] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 559.036540] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] result = hub.switch() [ 559.036540] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
559.036540] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] return self.greenlet.switch() [ 559.036855] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 559.036855] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] result = function(*args, **kwargs) [ 559.036855] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 559.036855] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] return func(*args, **kwargs) [ 559.036855] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 559.036855] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] raise e [ 559.036855] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 559.036855] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] nwinfo = self.network_api.allocate_for_instance( [ 559.036855] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 559.036855] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] created_port_ids = self._update_ports_for_instance( [ 559.036855] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 559.036855] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] with excutils.save_and_reraise_exception(): [ 559.036855] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 559.037987] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] self.force_reraise() [ 559.037987] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 559.037987] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] raise self.value [ 559.037987] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 559.037987] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] updated_port = self._update_port( [ 559.037987] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 559.037987] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] _ensure_no_port_binding_failure(port) [ 559.037987] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 559.037987] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] raise exception.PortBindingFailed(port_id=port['id']) [ 559.037987] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] nova.exception.PortBindingFailed: Binding failed for port 61b8e775-6352-4a95-8d43-acb5384ff45e, please check neutron logs for more information. [ 559.037987] env[62096]: ERROR nova.compute.manager [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] [ 559.038344] env[62096]: DEBUG nova.compute.utils [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] Binding failed for port 61b8e775-6352-4a95-8d43-acb5384ff45e, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 559.038344] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.456s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 559.040282] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Expecting reply to msg f3536f3aa41847fb99a2774d300b7252 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 559.041612] env[62096]: DEBUG nova.compute.manager [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] Build of instance 5ea9e99b-dffd-4b11-97ff-634551c00745 was re-scheduled: Binding failed for port 61b8e775-6352-4a95-8d43-acb5384ff45e, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 559.041925] env[62096]: DEBUG nova.compute.manager [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 559.042148] env[62096]: DEBUG oslo_concurrency.lockutils [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Acquiring lock "refresh_cache-5ea9e99b-dffd-4b11-97ff-634551c00745" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 559.042297] env[62096]: DEBUG oslo_concurrency.lockutils [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Acquired lock "refresh_cache-5ea9e99b-dffd-4b11-97ff-634551c00745" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 559.042492] env[62096]: DEBUG nova.network.neutron [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 559.043023] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Expecting reply to msg cf77f051c89e4e9cbcc431d56b3f75ab in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 559.051268] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cf77f051c89e4e9cbcc431d56b3f75ab [ 559.096124] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f3536f3aa41847fb99a2774d300b7252 [ 559.103914] env[62096]: DEBUG nova.network.neutron [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] [instance: 3530e93e-f729-471b-976b-e52c3182cb15] Successfully created port: 5fdd06cc-1563-494e-a278-34fb92f36b0a {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 559.217860] env[62096]: DEBUG nova.compute.manager [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] [instance: 3530e93e-f729-471b-976b-e52c3182cb15] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 559.245374] env[62096]: DEBUG nova.virt.hardware [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 559.245746] env[62096]: DEBUG nova.virt.hardware [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 559.245922] env[62096]: DEBUG nova.virt.hardware [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 559.246112] env[62096]: DEBUG nova.virt.hardware [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 559.246309] env[62096]: DEBUG nova.virt.hardware [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 559.249878] env[62096]: DEBUG nova.virt.hardware [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 559.250066] env[62096]: DEBUG nova.virt.hardware [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 559.250246] env[62096]: DEBUG nova.virt.hardware [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 559.250428] env[62096]: DEBUG nova.virt.hardware [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 559.250751] env[62096]: DEBUG nova.virt.hardware [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 559.250751] env[62096]: DEBUG nova.virt.hardware [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 559.251658] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58d886ff-0275-44ce-b7ef-b7eec945320a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.260145] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b6d74d4-4998-4f2e-9c10-a155ec95a63c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.597563] env[62096]: DEBUG nova.network.neutron [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 559.785454] env[62096]: DEBUG nova.network.neutron [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 559.785454] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Expecting reply to msg 6349aa755d454a66af6a89718b4b5b9b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 559.796652] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6349aa755d454a66af6a89718b4b5b9b [ 559.839527] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0d1b70f-ebbe-4a29-aa2f-a808b71471d0 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.847246] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-595e08e3-ea0a-4dec-b7be-37db83956d72 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.884591] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bf93b22-1568-4a1f-9c4a-2376c18c6ccf {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.901996] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32228fdd-fc51-4c50-b993-96e21a23b46a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.923425] env[62096]: DEBUG nova.compute.provider_tree [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 559.924063] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Expecting reply to msg eef8972c3321434d9aa88e620a796e51 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 559.934222] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eef8972c3321434d9aa88e620a796e51 [ 560.290502] env[62096]: DEBUG oslo_concurrency.lockutils [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Releasing lock "refresh_cache-5ea9e99b-dffd-4b11-97ff-634551c00745" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 560.290796] env[62096]: DEBUG nova.compute.manager [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Virt driver does not provide unplug_vifs method, so it is not possible 
determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 560.290891] env[62096]: DEBUG nova.compute.manager [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 560.291051] env[62096]: DEBUG nova.network.neutron [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 560.331740] env[62096]: DEBUG nova.network.neutron [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 560.332394] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Expecting reply to msg 663c9a7566824cf8babdb326a88c29a1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 560.339638] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 663c9a7566824cf8babdb326a88c29a1 [ 560.427321] env[62096]: DEBUG nova.scheduler.client.report [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 560.429641] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Expecting reply to msg 7c1b474e3bb34f309514eecb5cdaddc5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 560.441384] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c1b474e3bb34f309514eecb5cdaddc5 [ 560.835201] env[62096]: DEBUG nova.network.neutron [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 560.835715] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] 
Expecting reply to msg dcd79b0b856348fdb54ee36fbb07f302 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 560.845479] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dcd79b0b856348fdb54ee36fbb07f302 [ 560.933532] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.894s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 560.933532] env[62096]: ERROR nova.compute.manager [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port cc3376b5-d3cb-420a-804b-c6a03e40dccd, please check neutron logs for more information. [ 560.933532] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] Traceback (most recent call last): [ 560.933532] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 560.933532] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] self.driver.spawn(context, instance, image_meta, [ 560.933532] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 560.933532] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 560.933532] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 560.933532] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] vm_ref = self.build_virtual_machine(instance, [ 560.933820] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 560.933820] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] vif_infos = vmwarevif.get_vif_info(self._session, [ 560.933820] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 560.933820] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] for vif in network_info: [ 560.933820] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 560.933820] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] return self._sync_wrapper(fn, *args, **kwargs) [ 560.933820] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 560.933820] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] self.wait() [ 560.933820] env[62096]: ERROR 
nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 560.933820] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] self[:] = self._gt.wait() [ 560.933820] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 560.933820] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] return self._exit_event.wait() [ 560.933820] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 560.934160] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] result = hub.switch() [ 560.934160] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 560.934160] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] return self.greenlet.switch() [ 560.934160] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 560.934160] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] result = function(*args, **kwargs) [ 560.934160] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 560.934160] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] return func(*args, **kwargs) [ 560.934160] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 560.934160] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] raise e [ 560.934160] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 560.934160] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] nwinfo = self.network_api.allocate_for_instance( [ 560.934160] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 560.934160] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] created_port_ids = self._update_ports_for_instance( [ 560.934504] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 560.934504] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] with excutils.save_and_reraise_exception(): [ 560.934504] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 560.934504] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] self.force_reraise() [ 560.934504] 
env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 560.934504] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] raise self.value [ 560.934504] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 560.934504] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] updated_port = self._update_port( [ 560.934504] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 560.934504] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] _ensure_no_port_binding_failure(port) [ 560.934504] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 560.934504] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] raise exception.PortBindingFailed(port_id=port['id']) [ 560.934809] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] nova.exception.PortBindingFailed: Binding failed for port cc3376b5-d3cb-420a-804b-c6a03e40dccd, please check neutron logs for more information. [ 560.934809] env[62096]: ERROR nova.compute.manager [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] [ 560.934809] env[62096]: DEBUG nova.compute.utils [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] Binding failed for port cc3376b5-d3cb-420a-804b-c6a03e40dccd, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 560.934809] env[62096]: DEBUG oslo_concurrency.lockutils [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.168s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 560.936696] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Expecting reply to msg d13fb3d3dce44c0ba490d7f3de02d5de in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 560.938022] env[62096]: DEBUG nova.compute.manager [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] Build of instance ebc7dc27-110e-4edc-87be-cb0ecdbe215f was re-scheduled: Binding failed for port cc3376b5-d3cb-420a-804b-c6a03e40dccd, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 560.938641] env[62096]: DEBUG nova.compute.manager [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 560.938863] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Acquiring lock "refresh_cache-ebc7dc27-110e-4edc-87be-cb0ecdbe215f" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 560.939039] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Acquired lock "refresh_cache-ebc7dc27-110e-4edc-87be-cb0ecdbe215f" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 560.939222] env[62096]: DEBUG nova.network.neutron [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 560.939620] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Expecting reply to msg 2336b11a2cf647b9aa9cc0b7867f8252 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 560.960395] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2336b11a2cf647b9aa9cc0b7867f8252 [ 560.984053] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d13fb3d3dce44c0ba490d7f3de02d5de [ 561.053990] env[62096]: ERROR nova.compute.manager [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 0198948c-99f9-4d41-9309-650d6ba5c5cc, please check neutron logs for more information. 
[ 561.053990] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 561.053990] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 561.053990] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 561.053990] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 561.053990] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 561.053990] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 561.053990] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 561.053990] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 561.053990] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 561.053990] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 561.053990] env[62096]: ERROR nova.compute.manager raise self.value [ 561.053990] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 561.053990] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 561.053990] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 561.053990] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 561.054537] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 561.054537] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 561.054537] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 0198948c-99f9-4d41-9309-650d6ba5c5cc, please check neutron logs for more information. 
[ 561.054537] env[62096]: ERROR nova.compute.manager [ 561.054537] env[62096]: Traceback (most recent call last): [ 561.054537] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 561.054537] env[62096]: listener.cb(fileno) [ 561.054537] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 561.054537] env[62096]: result = function(*args, **kwargs) [ 561.054537] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 561.054537] env[62096]: return func(*args, **kwargs) [ 561.054537] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 561.054537] env[62096]: raise e [ 561.054537] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 561.054537] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 561.054537] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 561.054537] env[62096]: created_port_ids = self._update_ports_for_instance( [ 561.054537] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 561.054537] env[62096]: with excutils.save_and_reraise_exception(): [ 561.054537] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 561.054537] env[62096]: self.force_reraise() [ 561.054537] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 561.054537] env[62096]: raise self.value [ 561.054537] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 561.054537] env[62096]: updated_port = self._update_port( [ 561.054537] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 561.054537] env[62096]: _ensure_no_port_binding_failure(port) [ 561.054537] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 561.054537] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 561.055459] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 0198948c-99f9-4d41-9309-650d6ba5c5cc, please check neutron logs for more information. [ 561.055459] env[62096]: Removing descriptor: 16 [ 561.055459] env[62096]: ERROR nova.compute.manager [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 0198948c-99f9-4d41-9309-650d6ba5c5cc, please check neutron logs for more information. 
[ 561.055459] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] Traceback (most recent call last): [ 561.055459] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 561.055459] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] yield resources [ 561.055459] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 561.055459] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] self.driver.spawn(context, instance, image_meta, [ 561.055459] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 561.055459] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 561.055459] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 561.055459] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] vm_ref = self.build_virtual_machine(instance, [ 561.055766] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 561.055766] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] vif_infos = vmwarevif.get_vif_info(self._session, [ 561.055766] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 561.055766] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] for vif in network_info: [ 561.055766] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 561.055766] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] return self._sync_wrapper(fn, *args, **kwargs) [ 561.055766] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 561.055766] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] self.wait() [ 561.055766] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 561.055766] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] self[:] = self._gt.wait() [ 561.055766] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 561.055766] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] return self._exit_event.wait() [ 561.055766] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 561.056134] env[62096]: ERROR 
nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] result = hub.switch() [ 561.056134] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 561.056134] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] return self.greenlet.switch() [ 561.056134] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 561.056134] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] result = function(*args, **kwargs) [ 561.056134] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 561.056134] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] return func(*args, **kwargs) [ 561.056134] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 561.056134] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] raise e [ 561.056134] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 561.056134] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] nwinfo = self.network_api.allocate_for_instance( [ 561.056134] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 561.056134] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] created_port_ids = self._update_ports_for_instance( [ 561.056510] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 561.056510] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] with excutils.save_and_reraise_exception(): [ 561.056510] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 561.056510] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] self.force_reraise() [ 561.056510] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 561.056510] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] raise self.value [ 561.056510] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 561.056510] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] updated_port = self._update_port( [ 561.056510] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 561.056510] 
env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] _ensure_no_port_binding_failure(port) [ 561.056510] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 561.056510] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] raise exception.PortBindingFailed(port_id=port['id']) [ 561.056819] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] nova.exception.PortBindingFailed: Binding failed for port 0198948c-99f9-4d41-9309-650d6ba5c5cc, please check neutron logs for more information. [ 561.056819] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] [ 561.056819] env[62096]: INFO nova.compute.manager [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] Terminating instance [ 561.058793] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Acquiring lock "refresh_cache-773f55a5-d40c-445e-b02a-268d4f88fdd7" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 561.058793] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Acquired lock "refresh_cache-773f55a5-d40c-445e-b02a-268d4f88fdd7" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 561.058793] env[62096]: DEBUG nova.network.neutron [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 561.058793] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Expecting reply to msg f4bcdf330f1e48e4a3c3e62cfdba71f2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 561.064655] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f4bcdf330f1e48e4a3c3e62cfdba71f2 [ 561.338013] env[62096]: INFO nova.compute.manager [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: 5ea9e99b-dffd-4b11-97ff-634551c00745] Took 1.05 seconds to deallocate network for instance. 
[ 561.339560] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Expecting reply to msg 0e80544f7d784d3b8c42a1ab8e68042b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 561.408074] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0e80544f7d784d3b8c42a1ab8e68042b [ 561.490906] env[62096]: DEBUG nova.network.neutron [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 561.648110] env[62096]: DEBUG nova.network.neutron [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 561.676190] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Acquiring lock "805c4c87-4e1a-4733-86a0-4c82daf615eb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 561.676434] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Lock "805c4c87-4e1a-4733-86a0-4c82daf615eb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 561.722266] env[62096]: DEBUG nova.network.neutron [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 561.722266] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Expecting reply to msg 4cd7a3ce6d5d4902a4a5b3a184f44ecb in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 561.738888] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4cd7a3ce6d5d4902a4a5b3a184f44ecb [ 561.740080] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f254c91-4276-4d22-82f0-1bcfb17c1809 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.754292] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dc50654-6764-4fa8-85b6-3b9eb1be628b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.819249] 
env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbd61b89-f985-4fd8-9588-d76b5639dbac {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.827986] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce410757-5a8f-480e-b0c5-17bfff43478f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.851888] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Expecting reply to msg f5b46dab146145bcb264b70b236959b0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 561.853218] env[62096]: DEBUG nova.compute.provider_tree [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 561.860057] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Expecting reply to msg 8cc4b188d2c347ceb3572acf6784044d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 561.860057] env[62096]: DEBUG nova.compute.manager [req-9163f9e7-6388-4a81-8cb1-5c703da0df1d req-d0570ff3-bc1d-48b5-9af1-4da2b5c3ecbf service nova] [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] Received event network-changed-0198948c-99f9-4d41-9309-650d6ba5c5cc {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 561.860057] env[62096]: DEBUG nova.compute.manager [req-9163f9e7-6388-4a81-8cb1-5c703da0df1d req-d0570ff3-bc1d-48b5-9af1-4da2b5c3ecbf service nova] [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] Refreshing instance network info cache due to event network-changed-0198948c-99f9-4d41-9309-650d6ba5c5cc. 
{{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 561.860057] env[62096]: DEBUG oslo_concurrency.lockutils [req-9163f9e7-6388-4a81-8cb1-5c703da0df1d req-d0570ff3-bc1d-48b5-9af1-4da2b5c3ecbf service nova] Acquiring lock "refresh_cache-773f55a5-d40c-445e-b02a-268d4f88fdd7" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 561.880880] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8cc4b188d2c347ceb3572acf6784044d [ 561.936380] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f5b46dab146145bcb264b70b236959b0 [ 561.960250] env[62096]: DEBUG nova.network.neutron [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 561.960974] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Expecting reply to msg edfed4664cdf4595bb4f675cb2cb7415 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 561.980786] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg edfed4664cdf4595bb4f675cb2cb7415 [ 562.220988] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Releasing lock "refresh_cache-ebc7dc27-110e-4edc-87be-cb0ecdbe215f" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 562.221238] env[62096]: DEBUG nova.compute.manager [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 562.221403] env[62096]: DEBUG nova.compute.manager [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 562.221568] env[62096]: DEBUG nova.network.neutron [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 562.249712] env[62096]: DEBUG nova.network.neutron [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 562.250315] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Expecting reply to msg 0f631ca7d0f44cd6b761e47798187f1e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 562.257650] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0f631ca7d0f44cd6b761e47798187f1e [ 562.359761] env[62096]: DEBUG nova.scheduler.client.report [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 562.362127] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Expecting reply to msg 91bead52b75e4f94a1dcd0339395ffd5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 562.375112] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 91bead52b75e4f94a1dcd0339395ffd5 [ 562.386641] env[62096]: INFO nova.scheduler.client.report [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Deleted allocations for instance 5ea9e99b-dffd-4b11-97ff-634551c00745 [ 562.392075] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Expecting reply to msg 13052236fa114ed08be1e59f29fa4cba in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 562.411452] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 13052236fa114ed08be1e59f29fa4cba [ 562.465209] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Releasing lock "refresh_cache-773f55a5-d40c-445e-b02a-268d4f88fdd7" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 562.465389] env[62096]: DEBUG nova.compute.manager [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 562.465581] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 562.465897] env[62096]: DEBUG oslo_concurrency.lockutils [req-9163f9e7-6388-4a81-8cb1-5c703da0df1d req-d0570ff3-bc1d-48b5-9af1-4da2b5c3ecbf service nova] Acquired lock "refresh_cache-773f55a5-d40c-445e-b02a-268d4f88fdd7" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 562.466043] env[62096]: DEBUG nova.network.neutron [req-9163f9e7-6388-4a81-8cb1-5c703da0df1d req-d0570ff3-bc1d-48b5-9af1-4da2b5c3ecbf service nova] [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] Refreshing network info cache for port 0198948c-99f9-4d41-9309-650d6ba5c5cc {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 562.466503] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-9163f9e7-6388-4a81-8cb1-5c703da0df1d req-d0570ff3-bc1d-48b5-9af1-4da2b5c3ecbf service nova] Expecting reply to msg 911be8089c03407e8f1116b7a897fb45 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 562.468623] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a2249db4-11e7-43de-9215-5fb877957f27 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.476319] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56c0d862-fc11-48f2-93e2-9016b4e768f6 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.489552] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 911be8089c03407e8f1116b7a897fb45 [ 562.501341] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 773f55a5-d40c-445e-b02a-268d4f88fdd7 could not be found. [ 562.501548] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 562.501742] env[62096]: INFO nova.compute.manager [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] Took 0.04 seconds to destroy the instance on the hypervisor. [ 562.501969] env[62096]: DEBUG oslo.service.loopingcall [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 562.502174] env[62096]: DEBUG nova.compute.manager [-] [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 562.502272] env[62096]: DEBUG nova.network.neutron [-] [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 562.547600] env[62096]: DEBUG nova.network.neutron [-] [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 562.547779] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg a20deea11ce049bd943426e06576a976 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 562.555498] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a20deea11ce049bd943426e06576a976 [ 562.752095] env[62096]: DEBUG nova.network.neutron [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 562.752640] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Expecting reply to msg 8850aabead33450f94520b19edc8adf8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 562.761568] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8850aabead33450f94520b19edc8adf8 [ 562.866822] env[62096]: DEBUG oslo_concurrency.lockutils [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.930s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 562.866822] env[62096]: ERROR nova.compute.manager [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 76965fb1-e16e-43bd-854f-ab0f78dc111e, please check neutron logs for more information. 
[ 562.866822] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] Traceback (most recent call last): [ 562.866822] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 562.866822] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] self.driver.spawn(context, instance, image_meta, [ 562.866822] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 562.866822] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 562.866822] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 562.866822] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] vm_ref = self.build_virtual_machine(instance, [ 562.867158] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 562.867158] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] vif_infos = vmwarevif.get_vif_info(self._session, [ 562.867158] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 562.867158] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] for vif in network_info: [ 562.867158] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 562.867158] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] return self._sync_wrapper(fn, *args, **kwargs) [ 562.867158] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 562.867158] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] self.wait() [ 562.867158] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 562.867158] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] self[:] = self._gt.wait() [ 562.867158] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 562.867158] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] return self._exit_event.wait() [ 562.867158] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 562.867483] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] result = hub.switch() [ 562.867483] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
562.867483] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] return self.greenlet.switch() [ 562.867483] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 562.867483] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] result = function(*args, **kwargs) [ 562.867483] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 562.867483] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] return func(*args, **kwargs) [ 562.867483] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 562.867483] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] raise e [ 562.867483] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 562.867483] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] nwinfo = self.network_api.allocate_for_instance( [ 562.867483] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 562.867483] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] created_port_ids = self._update_ports_for_instance( [ 562.867809] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 562.867809] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] with excutils.save_and_reraise_exception(): [ 562.867809] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 562.867809] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] self.force_reraise() [ 562.867809] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 562.867809] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] raise self.value [ 562.867809] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 562.867809] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] updated_port = self._update_port( [ 562.867809] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 562.867809] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] _ensure_no_port_binding_failure(port) [ 562.867809] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 562.867809] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] raise exception.PortBindingFailed(port_id=port['id']) [ 562.868126] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] nova.exception.PortBindingFailed: Binding failed for port 76965fb1-e16e-43bd-854f-ab0f78dc111e, please check neutron logs for more information. [ 562.868126] env[62096]: ERROR nova.compute.manager [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] [ 562.868126] env[62096]: DEBUG nova.compute.utils [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] Binding failed for port 76965fb1-e16e-43bd-854f-ab0f78dc111e, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 562.868126] env[62096]: DEBUG oslo_concurrency.lockutils [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.071s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 562.871317] env[62096]: INFO nova.compute.claims [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 562.873006] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Expecting reply to msg ea9eea72b3ca41c88e3fd6cb98cf2562 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 562.878596] env[62096]: DEBUG nova.compute.manager [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] Build of instance 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2 was re-scheduled: Binding failed for port 76965fb1-e16e-43bd-854f-ab0f78dc111e, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 562.878596] env[62096]: DEBUG nova.compute.manager [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 562.878596] env[62096]: DEBUG oslo_concurrency.lockutils [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Acquiring lock "refresh_cache-8001fc1b-fa8c-496d-acc8-922cfd5d5cc2" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 562.878596] env[62096]: DEBUG oslo_concurrency.lockutils [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Acquired lock "refresh_cache-8001fc1b-fa8c-496d-acc8-922cfd5d5cc2" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 562.878989] env[62096]: DEBUG nova.network.neutron [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 562.878989] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Expecting reply to msg 96ae0233fd8b4e4493276ed1ff32759d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 562.885986] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 96ae0233fd8b4e4493276ed1ff32759d [ 562.898419] env[62096]: DEBUG oslo_concurrency.lockutils [None req-7dd77095-b2e2-4af3-8029-5e47b60be2ee tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Lock "5ea9e99b-dffd-4b11-97ff-634551c00745" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.584s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 562.899017] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Expecting reply to msg 57aedc731c8c495e9e78d09d0e8eccfc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 562.919138] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 57aedc731c8c495e9e78d09d0e8eccfc [ 562.928759] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ea9eea72b3ca41c88e3fd6cb98cf2562 [ 562.985483] env[62096]: DEBUG nova.network.neutron [req-9163f9e7-6388-4a81-8cb1-5c703da0df1d req-d0570ff3-bc1d-48b5-9af1-4da2b5c3ecbf service nova] [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 563.050341] env[62096]: DEBUG nova.network.neutron [-] [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 563.050830] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 500761a30bd1487f985463dc8ee24ef5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 563.059545] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 500761a30bd1487f985463dc8ee24ef5 [ 563.060797] env[62096]: DEBUG nova.network.neutron [req-9163f9e7-6388-4a81-8cb1-5c703da0df1d req-d0570ff3-bc1d-48b5-9af1-4da2b5c3ecbf service nova] [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 563.061276] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-9163f9e7-6388-4a81-8cb1-5c703da0df1d req-d0570ff3-bc1d-48b5-9af1-4da2b5c3ecbf service nova] Expecting reply to msg 5055035af4054556ab3902e012de240d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 563.073437] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5055035af4054556ab3902e012de240d [ 563.261038] env[62096]: INFO nova.compute.manager [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] [instance: ebc7dc27-110e-4edc-87be-cb0ecdbe215f] Took 1.04 seconds to deallocate network for instance. [ 563.261038] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Expecting reply to msg e9a519158a634d5c8bba0b7285344889 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 563.304858] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e9a519158a634d5c8bba0b7285344889 [ 563.379393] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Expecting reply to msg 103b1f2f8fc24e61a7a8adc88c73628a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 563.389999] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 103b1f2f8fc24e61a7a8adc88c73628a [ 563.400799] env[62096]: DEBUG nova.compute.manager [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] Starting instance... 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 563.402884] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Expecting reply to msg 39962d255f104d93937c072aeef7660b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 563.422025] env[62096]: DEBUG nova.network.neutron [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 563.455044] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 39962d255f104d93937c072aeef7660b [ 563.530315] env[62096]: DEBUG nova.network.neutron [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 563.530859] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Expecting reply to msg 18c9158732374554a5769c690b14582e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 563.541273] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 18c9158732374554a5769c690b14582e [ 563.553012] env[62096]: INFO nova.compute.manager [-] [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] Took 1.05 seconds to deallocate network for instance. 
[ 563.556397] env[62096]: DEBUG nova.compute.claims [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 563.556573] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.563474] env[62096]: DEBUG oslo_concurrency.lockutils [req-9163f9e7-6388-4a81-8cb1-5c703da0df1d req-d0570ff3-bc1d-48b5-9af1-4da2b5c3ecbf service nova] Releasing lock "refresh_cache-773f55a5-d40c-445e-b02a-268d4f88fdd7" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 563.765058] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Expecting reply to msg af7841e92e50447497c1690833f54883 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 563.802319] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg af7841e92e50447497c1690833f54883 [ 563.929937] env[62096]: ERROR nova.compute.manager [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 5fdd06cc-1563-494e-a278-34fb92f36b0a, please check neutron logs for more information. 
[ 563.929937] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 563.929937] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 563.929937] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 563.929937] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 563.929937] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 563.929937] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 563.929937] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 563.929937] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 563.929937] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 563.929937] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 563.929937] env[62096]: ERROR nova.compute.manager raise self.value [ 563.929937] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 563.929937] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 563.929937] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 563.929937] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 563.930610] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 563.930610] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 563.930610] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 5fdd06cc-1563-494e-a278-34fb92f36b0a, please check neutron logs for more information. 
[ 563.930610] env[62096]: ERROR nova.compute.manager [ 563.930610] env[62096]: Traceback (most recent call last): [ 563.930610] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 563.930610] env[62096]: listener.cb(fileno) [ 563.930610] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 563.930610] env[62096]: result = function(*args, **kwargs) [ 563.930610] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 563.930610] env[62096]: return func(*args, **kwargs) [ 563.930610] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 563.930610] env[62096]: raise e [ 563.930610] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 563.930610] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 563.930610] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 563.930610] env[62096]: created_port_ids = self._update_ports_for_instance( [ 563.930610] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 563.930610] env[62096]: with excutils.save_and_reraise_exception(): [ 563.930610] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 563.930610] env[62096]: self.force_reraise() [ 563.930610] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 563.930610] env[62096]: raise self.value [ 563.930610] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 563.930610] env[62096]: updated_port = self._update_port( [ 563.930610] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 563.930610] env[62096]: _ensure_no_port_binding_failure(port) [ 563.930610] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 563.930610] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 563.931521] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 5fdd06cc-1563-494e-a278-34fb92f36b0a, please check neutron logs for more information. [ 563.931521] env[62096]: Removing descriptor: 19 [ 563.931521] env[62096]: DEBUG oslo_concurrency.lockutils [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.931702] env[62096]: ERROR nova.compute.manager [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] [instance: 3530e93e-f729-471b-976b-e52c3182cb15] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 5fdd06cc-1563-494e-a278-34fb92f36b0a, please check neutron logs for more information. 
[ 563.931702] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] Traceback (most recent call last): [ 563.931702] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 563.931702] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] yield resources [ 563.931702] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 563.931702] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] self.driver.spawn(context, instance, image_meta, [ 563.931702] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 563.931702] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] self._vmops.spawn(context, instance, image_meta, injected_files, [ 563.931702] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 563.931702] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] vm_ref = self.build_virtual_machine(instance, [ 563.931702] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 563.932060] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] vif_infos = vmwarevif.get_vif_info(self._session, [ 563.932060] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 563.932060] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] for vif in network_info: [ 563.932060] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 563.932060] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] return self._sync_wrapper(fn, *args, **kwargs) [ 563.932060] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 563.932060] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] self.wait() [ 563.932060] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 563.932060] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] self[:] = self._gt.wait() [ 563.932060] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 563.932060] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] return self._exit_event.wait() [ 563.932060] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 563.932060] env[62096]: ERROR 
nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] result = hub.switch() [ 563.932602] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 563.932602] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] return self.greenlet.switch() [ 563.932602] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 563.932602] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] result = function(*args, **kwargs) [ 563.932602] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 563.932602] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] return func(*args, **kwargs) [ 563.932602] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 563.932602] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] raise e [ 563.932602] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 563.932602] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] nwinfo = self.network_api.allocate_for_instance( [ 563.932602] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 563.932602] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] created_port_ids = self._update_ports_for_instance( [ 563.932602] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 563.933235] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] with excutils.save_and_reraise_exception(): [ 563.933235] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 563.933235] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] self.force_reraise() [ 563.933235] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 563.933235] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] raise self.value [ 563.933235] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 563.933235] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] updated_port = self._update_port( [ 563.933235] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 563.933235] 
env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] _ensure_no_port_binding_failure(port) [ 563.933235] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 563.933235] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] raise exception.PortBindingFailed(port_id=port['id']) [ 563.933235] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] nova.exception.PortBindingFailed: Binding failed for port 5fdd06cc-1563-494e-a278-34fb92f36b0a, please check neutron logs for more information. [ 563.933235] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] [ 563.933646] env[62096]: INFO nova.compute.manager [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] [instance: 3530e93e-f729-471b-976b-e52c3182cb15] Terminating instance [ 563.938416] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Acquiring lock "refresh_cache-3530e93e-f729-471b-976b-e52c3182cb15" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 563.938574] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Acquired lock "refresh_cache-3530e93e-f729-471b-976b-e52c3182cb15" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 563.938734] env[62096]: DEBUG nova.network.neutron [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] [instance: 3530e93e-f729-471b-976b-e52c3182cb15] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 563.939150] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Expecting reply to msg 43f970e11995437795d95100fb269606 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 563.950414] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 43f970e11995437795d95100fb269606 [ 564.032886] env[62096]: DEBUG oslo_concurrency.lockutils [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Releasing lock "refresh_cache-8001fc1b-fa8c-496d-acc8-922cfd5d5cc2" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 564.033164] env[62096]: DEBUG nova.compute.manager [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 564.033370] env[62096]: DEBUG nova.compute.manager [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 564.033550] env[62096]: DEBUG nova.network.neutron [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 564.090187] env[62096]: DEBUG nova.network.neutron [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 564.090757] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Expecting reply to msg c8c0d1f09b2d4162884e830c80f68187 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 564.098088] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c8c0d1f09b2d4162884e830c80f68187 [ 564.151378] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb68bae2-ed4a-419a-8f4e-decc48cbda90 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.158487] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65b979e2-0bfa-4bae-ba21-783a7f2ca99d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.197202] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21219a4d-fd4d-4b1c-b41d-69f7a99fde48 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.211368] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e00ad2a4-bb77-4423-913b-1951497f1448 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.249066] env[62096]: DEBUG nova.compute.provider_tree [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 564.249575] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Expecting reply to msg dc0139d3c96d4bd098fea71129f0ee44 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 564.259540] env[62096]: INFO oslo_messaging._drivers.amqpdriver 
[-] Received RPC response for msg dc0139d3c96d4bd098fea71129f0ee44 [ 564.294121] env[62096]: INFO nova.scheduler.client.report [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Deleted allocations for instance ebc7dc27-110e-4edc-87be-cb0ecdbe215f [ 564.304849] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Expecting reply to msg 7bc06162e5ef4885bb1bbd4b5c66d585 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 564.321279] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7bc06162e5ef4885bb1bbd4b5c66d585 [ 564.509204] env[62096]: DEBUG nova.network.neutron [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] [instance: 3530e93e-f729-471b-976b-e52c3182cb15] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 564.597920] env[62096]: DEBUG nova.network.neutron [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 564.598475] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Expecting reply to msg 4f97d7923ed54ba6a9f7e251ec9ac1a2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 564.612355] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4f97d7923ed54ba6a9f7e251ec9ac1a2 [ 564.752379] env[62096]: DEBUG nova.scheduler.client.report [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 564.754731] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Expecting reply to msg a36dd21af7c64e3492b77b8d64d4233c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 564.766556] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a36dd21af7c64e3492b77b8d64d4233c [ 564.807317] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4495e51c-6f81-4215-8c28-fb72754e88b3 tempest-ServersAdminNegativeTestJSON-706848671 tempest-ServersAdminNegativeTestJSON-706848671-project-member] Lock "ebc7dc27-110e-4edc-87be-cb0ecdbe215f" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.491s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 564.807870] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Expecting reply to msg e3db7c75651b42e0b985a83ee77641c0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 564.845565] env[62096]: DEBUG nova.network.neutron [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] [instance: 3530e93e-f729-471b-976b-e52c3182cb15] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 564.846068] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Expecting reply to msg e7ef8166ed1341769535311905ab4033 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 564.856466] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e3db7c75651b42e0b985a83ee77641c0 [ 564.856925] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e7ef8166ed1341769535311905ab4033 [ 564.865841] env[62096]: DEBUG nova.compute.manager [req-9c60428c-b830-459e-b56b-c38f0ba0afd7 req-19a3f43b-b3ce-4794-8143-c2ec49158156 service nova] [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] Received event network-vif-deleted-0198948c-99f9-4d41-9309-650d6ba5c5cc {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 564.866047] env[62096]: DEBUG nova.compute.manager [req-9c60428c-b830-459e-b56b-c38f0ba0afd7 req-19a3f43b-b3ce-4794-8143-c2ec49158156 service nova] [instance: 3530e93e-f729-471b-976b-e52c3182cb15] Received event network-changed-5fdd06cc-1563-494e-a278-34fb92f36b0a {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 564.866215] env[62096]: DEBUG nova.compute.manager [req-9c60428c-b830-459e-b56b-c38f0ba0afd7 req-19a3f43b-b3ce-4794-8143-c2ec49158156 service nova] [instance: 3530e93e-f729-471b-976b-e52c3182cb15] Refreshing instance network info cache due to event network-changed-5fdd06cc-1563-494e-a278-34fb92f36b0a. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 564.866408] env[62096]: DEBUG oslo_concurrency.lockutils [req-9c60428c-b830-459e-b56b-c38f0ba0afd7 req-19a3f43b-b3ce-4794-8143-c2ec49158156 service nova] Acquiring lock "refresh_cache-3530e93e-f729-471b-976b-e52c3182cb15" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 565.104386] env[62096]: INFO nova.compute.manager [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] [instance: 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2] Took 1.07 seconds to deallocate network for instance. 
[ 565.104386] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Expecting reply to msg 7349843a9199433c9d54584bd2809641 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 565.147883] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7349843a9199433c9d54584bd2809641 [ 565.262967] env[62096]: DEBUG oslo_concurrency.lockutils [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.390s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 565.262967] env[62096]: DEBUG nova.compute.manager [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] Start building networks asynchronously for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 565.262967] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Expecting reply to msg 8675f62b40be4a1abaecb7b6efe9f47a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 565.262967] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 16.435s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 565.262967] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 8d97a861cdb949d1ae07c5dc49d379d5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 565.284659] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8d97a861cdb949d1ae07c5dc49d379d5 [ 565.293614] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8675f62b40be4a1abaecb7b6efe9f47a [ 565.311433] env[62096]: DEBUG nova.compute.manager [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] Starting instance... 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 565.317201] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Expecting reply to msg 7c57f65723184404921eaea75b670d1d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 565.353249] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Releasing lock "refresh_cache-3530e93e-f729-471b-976b-e52c3182cb15" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 565.353249] env[62096]: DEBUG nova.compute.manager [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] [instance: 3530e93e-f729-471b-976b-e52c3182cb15] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 565.353249] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] [instance: 3530e93e-f729-471b-976b-e52c3182cb15] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 565.353249] env[62096]: DEBUG oslo_concurrency.lockutils [req-9c60428c-b830-459e-b56b-c38f0ba0afd7 req-19a3f43b-b3ce-4794-8143-c2ec49158156 service nova] Acquired lock "refresh_cache-3530e93e-f729-471b-976b-e52c3182cb15" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 565.353249] env[62096]: DEBUG nova.network.neutron [req-9c60428c-b830-459e-b56b-c38f0ba0afd7 req-19a3f43b-b3ce-4794-8143-c2ec49158156 service nova] [instance: 3530e93e-f729-471b-976b-e52c3182cb15] Refreshing network info cache for port 5fdd06cc-1563-494e-a278-34fb92f36b0a {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 565.353596] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-9c60428c-b830-459e-b56b-c38f0ba0afd7 req-19a3f43b-b3ce-4794-8143-c2ec49158156 service nova] Expecting reply to msg 1dce801421ef45b983d37b4366c1ba08 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 565.353596] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-55e78207-3e25-404a-8273-29c18fcce595 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.359120] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c57f65723184404921eaea75b670d1d [ 565.364213] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd5a6b41-8723-434d-970f-8c10e0743163 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.374844] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1dce801421ef45b983d37b4366c1ba08 [ 565.388765] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] [instance: 
3530e93e-f729-471b-976b-e52c3182cb15] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3530e93e-f729-471b-976b-e52c3182cb15 could not be found. [ 565.389032] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] [instance: 3530e93e-f729-471b-976b-e52c3182cb15] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 565.389252] env[62096]: INFO nova.compute.manager [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] [instance: 3530e93e-f729-471b-976b-e52c3182cb15] Took 0.04 seconds to destroy the instance on the hypervisor. [ 565.389531] env[62096]: DEBUG oslo.service.loopingcall [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 565.390140] env[62096]: DEBUG nova.compute.manager [-] [instance: 3530e93e-f729-471b-976b-e52c3182cb15] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 565.390140] env[62096]: DEBUG nova.network.neutron [-] [instance: 3530e93e-f729-471b-976b-e52c3182cb15] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 565.427720] env[62096]: DEBUG nova.network.neutron [-] [instance: 3530e93e-f729-471b-976b-e52c3182cb15] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 565.428318] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg dff5b5f9571d42ea8eaf97662b02f3ad in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 565.441045] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dff5b5f9571d42ea8eaf97662b02f3ad [ 565.608318] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Expecting reply to msg 6adaed1610324a8cb895d21d33b25ed5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 565.671761] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6adaed1610324a8cb895d21d33b25ed5 [ 565.768224] env[62096]: DEBUG nova.compute.utils [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 565.768224] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Expecting reply to msg bb398d4c0b154006a1af47085e80c920 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 565.768224] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 52dd427cd864471c92b44822cba8579b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 565.773081] env[62096]: DEBUG nova.compute.manager [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 565.773081] env[62096]: DEBUG nova.network.neutron [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 565.779466] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bb398d4c0b154006a1af47085e80c920 [ 565.789462] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 52dd427cd864471c92b44822cba8579b [ 565.839794] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 565.910120] env[62096]: DEBUG nova.network.neutron [req-9c60428c-b830-459e-b56b-c38f0ba0afd7 req-19a3f43b-b3ce-4794-8143-c2ec49158156 service nova] [instance: 3530e93e-f729-471b-976b-e52c3182cb15] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 565.930563] env[62096]: DEBUG nova.network.neutron [-] [instance: 3530e93e-f729-471b-976b-e52c3182cb15] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 565.930563] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c8929c06ff294a678ef9e9806bd2da87 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 565.940788] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c8929c06ff294a678ef9e9806bd2da87 [ 566.048615] env[62096]: DEBUG nova.policy [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8fdb28d80ba747189147a109e83d6b8e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '962f0302df2a4282a6fadece663807fe', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 566.134715] env[62096]: INFO nova.scheduler.client.report [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Deleted allocations for instance 8001fc1b-fa8c-496d-acc8-922cfd5d5cc2 [ 566.141416] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Expecting reply to msg bd0f40d7e6344d139b16832396a114e7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 566.165787] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bd0f40d7e6344d139b16832396a114e7 [ 566.269855] env[62096]: DEBUG nova.compute.manager [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] Start building block device mappings for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 566.271674] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Expecting reply to msg c388bc5904f9470aba9e3c0c49b4391e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 566.292277] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 9ebe9a03-3fc2-4a77-bb2b-d220e9693115 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 566.292434] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 773f55a5-d40c-445e-b02a-268d4f88fdd7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 566.292557] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 3530e93e-f729-471b-976b-e52c3182cb15 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 566.292675] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 94fbc312-96fb-4b62-adc7-18053d509eca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 566.293245] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 8bc52629d2c846cfa12c83c7b5be3fcc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 566.304917] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c388bc5904f9470aba9e3c0c49b4391e [ 566.318062] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8bc52629d2c846cfa12c83c7b5be3fcc [ 566.432520] env[62096]: INFO nova.compute.manager [-] [instance: 3530e93e-f729-471b-976b-e52c3182cb15] Took 1.04 seconds to deallocate network for instance. [ 566.435081] env[62096]: DEBUG nova.compute.claims [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] [instance: 3530e93e-f729-471b-976b-e52c3182cb15] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 566.435268] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 566.454068] env[62096]: DEBUG nova.network.neutron [req-9c60428c-b830-459e-b56b-c38f0ba0afd7 req-19a3f43b-b3ce-4794-8143-c2ec49158156 service nova] [instance: 3530e93e-f729-471b-976b-e52c3182cb15] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 566.454565] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-9c60428c-b830-459e-b56b-c38f0ba0afd7 req-19a3f43b-b3ce-4794-8143-c2ec49158156 service nova] Expecting reply to msg 4823688d4cc04b35a9b1945f021c6c42 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 566.462608] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4823688d4cc04b35a9b1945f021c6c42 [ 566.653599] env[62096]: DEBUG oslo_concurrency.lockutils [None req-530f9427-aadb-4fc8-9c2d-063e58e29f49 tempest-FloatingIPsAssociationTestJSON-1970391969 tempest-FloatingIPsAssociationTestJSON-1970391969-project-member] Lock "8001fc1b-fa8c-496d-acc8-922cfd5d5cc2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.408s {{(pid=62096) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 566.663171] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Expecting reply to msg d577422167904980a3f14d8471b5924d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 566.674872] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d577422167904980a3f14d8471b5924d [ 566.776397] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Expecting reply to msg d52d87871e9f491980ccce452d764dbf in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 566.797720] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance fcdeb1ed-7f21-4338-8964-63d16d275bc2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 566.797720] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg f706ac04854f48d9b2308c0d2b0705cb in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 566.809126] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f706ac04854f48d9b2308c0d2b0705cb [ 566.820051] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d52d87871e9f491980ccce452d764dbf [ 566.956669] env[62096]: DEBUG oslo_concurrency.lockutils [req-9c60428c-b830-459e-b56b-c38f0ba0afd7 req-19a3f43b-b3ce-4794-8143-c2ec49158156 service nova] Releasing lock "refresh_cache-3530e93e-f729-471b-976b-e52c3182cb15" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 567.161865] env[62096]: DEBUG nova.compute.manager [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 567.161865] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Expecting reply to msg d38a827f247548e6a3c6cc970e15c2cd in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 567.285192] env[62096]: DEBUG nova.compute.manager [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 567.301139] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d38a827f247548e6a3c6cc970e15c2cd [ 567.301986] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 1377dbd7-9025-4683-be56-6e8987a5d72e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 567.302547] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 3e5670d020274361b7b815be9ab2dccf in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 567.353632] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e5670d020274361b7b815be9ab2dccf [ 567.366406] env[62096]: DEBUG nova.virt.hardware [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 567.368036] env[62096]: DEBUG nova.virt.hardware [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 567.368036] env[62096]: DEBUG nova.virt.hardware [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 567.368036] env[62096]: DEBUG nova.virt.hardware [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 567.368036] env[62096]: DEBUG nova.virt.hardware [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 567.368036] env[62096]: DEBUG nova.virt.hardware [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 
tempest-SecurityGroupsTestJSON-1657304550-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 567.368669] env[62096]: DEBUG nova.virt.hardware [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 567.368669] env[62096]: DEBUG nova.virt.hardware [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 567.368669] env[62096]: DEBUG nova.virt.hardware [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 567.368669] env[62096]: DEBUG nova.virt.hardware [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 567.368669] env[62096]: DEBUG nova.virt.hardware [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 567.368934] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2031839-670f-4c11-9cee-de46b7210130 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.379674] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2f6c4a7-e0fb-48c9-98c5-ed0dc40aa6de {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.585386] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Acquiring lock "e48a5bde-e299-4567-b952-3c5f096fb65d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 567.585605] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Lock "e48a5bde-e299-4567-b952-3c5f096fb65d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 567.688970] env[62096]: DEBUG 
oslo_concurrency.lockutils [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 567.809584] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 09410bd9-aa4e-49a8-86fb-8058b842bd72 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 567.810193] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg afbc2d1e085e429dbbd321a92e59702d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 567.825452] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg afbc2d1e085e429dbbd321a92e59702d [ 568.138127] env[62096]: DEBUG nova.network.neutron [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] Successfully created port: 3d2c2a0a-10a1-4b8d-b866-46b384c96647 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 568.313197] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 4fca4cf6-1c0e-487e-bf26-fc441d143128 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 568.313783] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 0e8a7ad61d67471c9964088af075bebd in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 568.328533] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0e8a7ad61d67471c9964088af075bebd [ 568.817244] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 568.817647] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 6c0321f7057147cf960b715ad4a1784d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 568.826381] env[62096]: DEBUG nova.compute.manager [req-5c68f5ac-b912-4135-8c96-33aae311d656 req-cdac6d76-cefb-41c0-a7eb-719ca431540a service nova] [instance: 3530e93e-f729-471b-976b-e52c3182cb15] Received event network-vif-deleted-5fdd06cc-1563-494e-a278-34fb92f36b0a {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 568.842618] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6c0321f7057147cf960b715ad4a1784d [ 569.320620] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 6ac3df26-83ab-4519-a1e2-51286c1d1991 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 569.321237] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg a4ab9b2f711a4a6998b1ce79e4d36154 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 569.339709] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a4ab9b2f711a4a6998b1ce79e4d36154 [ 569.498140] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Acquiring lock "69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.498509] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Lock "69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.824209] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 3855c98d-4ade-4f6f-85aa-1297df5a39a9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 569.824846] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 246e55723ac244478353f08134ebcc6e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 569.835896] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 246e55723ac244478353f08134ebcc6e [ 570.329379] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 570.329379] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 93f68474890748b8b16faa873eb2d66c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 570.339315] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 93f68474890748b8b16faa873eb2d66c [ 570.832943] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 9bc49e7a-af22-4112-8609-348605599692 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 570.832943] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 20589a20237d4009ae6ca20644be10b5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 570.845774] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 20589a20237d4009ae6ca20644be10b5 [ 571.338199] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 805c4c87-4e1a-4733-86a0-4c82daf615eb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 571.338199] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=62096) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 571.338199] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=62096) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 571.725653] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Acquiring lock "27d12301-a049-4d1e-b171-a09a642703fb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 571.725902] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Lock "27d12301-a049-4d1e-b171-a09a642703fb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.752699] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2fbfc14-8c9a-45ca-b790-f6521e8e6140 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.762719] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3f4b7e2-e257-490f-a223-6e7192bc4539 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.768494] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Acquiring lock "d9562762-52de-4a0c-b6a2-2aeaa20e47a0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 571.768729] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Lock "d9562762-52de-4a0c-b6a2-2aeaa20e47a0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.804387] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44e231bb-1c03-4ff1-8690-3a6204430ed3 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.811147] env[62096]: DEBUG 
oslo_concurrency.lockutils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Acquiring lock "bbed3eed-f511-4b9e-9632-74841df01592" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 571.811366] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Lock "bbed3eed-f511-4b9e-9632-74841df01592" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.815433] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d91cfb5-cb92-4b07-8a33-5456129689b5 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.829807] env[62096]: DEBUG nova.compute.provider_tree [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 571.830393] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg b620235ef8564386b66a8adfb570ee24 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 571.849203] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b620235ef8564386b66a8adfb570ee24 [ 572.342592] env[62096]: DEBUG nova.scheduler.client.report [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 572.345089] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 327c9018e8094cf093296655bb59513d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 572.360565] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 327c9018e8094cf093296655bb59513d [ 572.478258] env[62096]: DEBUG nova.compute.manager [req-35ed5989-0648-4829-afaa-0555b4074ef3 req-42a4a271-ec42-47d6-a57e-f0cb3dbd9210 service nova] [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] Received event network-changed-3d2c2a0a-10a1-4b8d-b866-46b384c96647 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 572.478450] env[62096]: DEBUG nova.compute.manager [req-35ed5989-0648-4829-afaa-0555b4074ef3 req-42a4a271-ec42-47d6-a57e-f0cb3dbd9210 service nova] [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] Refreshing instance network info cache due to event network-changed-3d2c2a0a-10a1-4b8d-b866-46b384c96647. 
{{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 572.478662] env[62096]: DEBUG oslo_concurrency.lockutils [req-35ed5989-0648-4829-afaa-0555b4074ef3 req-42a4a271-ec42-47d6-a57e-f0cb3dbd9210 service nova] Acquiring lock "refresh_cache-94fbc312-96fb-4b62-adc7-18053d509eca" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 572.478798] env[62096]: DEBUG oslo_concurrency.lockutils [req-35ed5989-0648-4829-afaa-0555b4074ef3 req-42a4a271-ec42-47d6-a57e-f0cb3dbd9210 service nova] Acquired lock "refresh_cache-94fbc312-96fb-4b62-adc7-18053d509eca" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 572.478951] env[62096]: DEBUG nova.network.neutron [req-35ed5989-0648-4829-afaa-0555b4074ef3 req-42a4a271-ec42-47d6-a57e-f0cb3dbd9210 service nova] [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] Refreshing network info cache for port 3d2c2a0a-10a1-4b8d-b866-46b384c96647 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 572.479427] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-35ed5989-0648-4829-afaa-0555b4074ef3 req-42a4a271-ec42-47d6-a57e-f0cb3dbd9210 service nova] Expecting reply to msg a51008b5b3cc4477b435bc185fd29821 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 572.489555] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a51008b5b3cc4477b435bc185fd29821 [ 572.582099] env[62096]: ERROR nova.compute.manager [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 3d2c2a0a-10a1-4b8d-b866-46b384c96647, please check neutron logs for more information. 
[ 572.582099] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 572.582099] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 572.582099] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 572.582099] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 572.582099] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 572.582099] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 572.582099] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 572.582099] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 572.582099] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 572.582099] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 572.582099] env[62096]: ERROR nova.compute.manager raise self.value [ 572.582099] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 572.582099] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 572.582099] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 572.582099] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 572.582602] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 572.582602] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 572.582602] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 3d2c2a0a-10a1-4b8d-b866-46b384c96647, please check neutron logs for more information. 
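Editor's aside: the traceback above ends in _ensure_no_port_binding_failure raising PortBindingFailed from inside a save_and_reraise_exception block. Below is a minimal, self-contained sketch of that pattern, not Nova's actual code: it assumes only the standard Neutron port attribute 'binding:vif_type' and its 'binding_failed' sentinel, uses a stand-in exception class, and wraps a hypothetical port-update callable.

from oslo_utils import excutils


class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed (illustration only)."""
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs for more "
            "information." % port_id)
        self.port_id = port_id


def ensure_no_port_binding_failure(port):
    # Neutron marks a port whose binding could not be completed by setting
    # binding:vif_type to 'binding_failed'; that condition is what produces
    # the PortBindingFailed errors seen in this log.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


def update_port_checked(update_port_call, port_id, body):
    # Hypothetical wrapper shaped like the frames above: apply the update,
    # verify the binding, and on failure run cleanup before re-raising the
    # original exception via oslo.utils' save_and_reraise_exception().
    try:
        port = update_port_call(port_id, body)['port']
        ensure_no_port_binding_failure(port)
        return port
    except Exception:
        with excutils.save_and_reraise_exception():
            pass  # rollback or extra logging would go here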
[ 572.582602] env[62096]: ERROR nova.compute.manager [ 572.582602] env[62096]: Traceback (most recent call last): [ 572.582602] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 572.582602] env[62096]: listener.cb(fileno) [ 572.582602] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 572.582602] env[62096]: result = function(*args, **kwargs) [ 572.582602] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 572.582602] env[62096]: return func(*args, **kwargs) [ 572.582602] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 572.582602] env[62096]: raise e [ 572.582602] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 572.582602] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 572.582602] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 572.582602] env[62096]: created_port_ids = self._update_ports_for_instance( [ 572.582602] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 572.582602] env[62096]: with excutils.save_and_reraise_exception(): [ 572.582602] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 572.582602] env[62096]: self.force_reraise() [ 572.582602] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 572.582602] env[62096]: raise self.value [ 572.582602] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 572.582602] env[62096]: updated_port = self._update_port( [ 572.582602] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 572.582602] env[62096]: _ensure_no_port_binding_failure(port) [ 572.582602] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 572.582602] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 572.583556] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 3d2c2a0a-10a1-4b8d-b866-46b384c96647, please check neutron logs for more information. [ 572.583556] env[62096]: Removing descriptor: 19 [ 572.583556] env[62096]: ERROR nova.compute.manager [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 3d2c2a0a-10a1-4b8d-b866-46b384c96647, please check neutron logs for more information. 
[ 572.583556] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] Traceback (most recent call last): [ 572.583556] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 572.583556] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] yield resources [ 572.583556] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 572.583556] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] self.driver.spawn(context, instance, image_meta, [ 572.583556] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 572.583556] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] self._vmops.spawn(context, instance, image_meta, injected_files, [ 572.583556] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 572.583556] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] vm_ref = self.build_virtual_machine(instance, [ 572.584073] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 572.584073] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] vif_infos = vmwarevif.get_vif_info(self._session, [ 572.584073] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 572.584073] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] for vif in network_info: [ 572.584073] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 572.584073] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] return self._sync_wrapper(fn, *args, **kwargs) [ 572.584073] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 572.584073] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] self.wait() [ 572.584073] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 572.584073] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] self[:] = self._gt.wait() [ 572.584073] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 572.584073] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] return self._exit_event.wait() [ 572.584073] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 572.584458] env[62096]: ERROR 
nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] result = hub.switch() [ 572.584458] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 572.584458] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] return self.greenlet.switch() [ 572.584458] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 572.584458] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] result = function(*args, **kwargs) [ 572.584458] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 572.584458] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] return func(*args, **kwargs) [ 572.584458] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 572.584458] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] raise e [ 572.584458] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 572.584458] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] nwinfo = self.network_api.allocate_for_instance( [ 572.584458] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 572.584458] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] created_port_ids = self._update_ports_for_instance( [ 572.584895] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 572.584895] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] with excutils.save_and_reraise_exception(): [ 572.584895] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 572.584895] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] self.force_reraise() [ 572.584895] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 572.584895] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] raise self.value [ 572.584895] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 572.584895] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] updated_port = self._update_port( [ 572.584895] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 572.584895] 
env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] _ensure_no_port_binding_failure(port) [ 572.584895] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 572.584895] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] raise exception.PortBindingFailed(port_id=port['id']) [ 572.585258] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] nova.exception.PortBindingFailed: Binding failed for port 3d2c2a0a-10a1-4b8d-b866-46b384c96647, please check neutron logs for more information. [ 572.585258] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] [ 572.585258] env[62096]: INFO nova.compute.manager [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] Terminating instance [ 572.586287] env[62096]: DEBUG oslo_concurrency.lockutils [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Acquiring lock "refresh_cache-94fbc312-96fb-4b62-adc7-18053d509eca" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 572.848204] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62096) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 572.848320] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 7.588s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 572.848586] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 21.036s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 572.850460] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Expecting reply to msg b6d1463123b1497f9f36da65011ee59d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 572.905577] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b6d1463123b1497f9f36da65011ee59d [ 573.055932] env[62096]: DEBUG nova.network.neutron [req-35ed5989-0648-4829-afaa-0555b4074ef3 req-42a4a271-ec42-47d6-a57e-f0cb3dbd9210 service nova] [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 573.635474] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87e867c7-0161-445c-ae88-e9048844e84f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.642897] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-772007a5-832e-4c8e-aa85-a18f316c5d91 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.674620] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cfd2f6d-0d44-489d-bbe1-b4ac9a1d685b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.682093] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caeadb70-e5f1-4c16-a1a6-ba5a568a8e88 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.705370] env[62096]: DEBUG nova.compute.provider_tree [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 573.705909] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Expecting reply to msg 25873b1135404cd1ba01fb87f2606704 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 573.714204] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 25873b1135404cd1ba01fb87f2606704 [ 573.736403] env[62096]: DEBUG nova.network.neutron [req-35ed5989-0648-4829-afaa-0555b4074ef3 req-42a4a271-ec42-47d6-a57e-f0cb3dbd9210 service nova] [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 573.736909] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-35ed5989-0648-4829-afaa-0555b4074ef3 req-42a4a271-ec42-47d6-a57e-f0cb3dbd9210 service nova] Expecting reply to msg 953ba7ae1a5c4a8ab9db9e13b28a5262 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 573.746613] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 953ba7ae1a5c4a8ab9db9e13b28a5262 [ 574.214968] env[62096]: DEBUG nova.scheduler.client.report [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:940}} [ 574.217306] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Expecting reply to msg 164f25739e9d48d6ad705d33eec66cab in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 574.230809] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 164f25739e9d48d6ad705d33eec66cab [ 574.239397] env[62096]: DEBUG oslo_concurrency.lockutils [req-35ed5989-0648-4829-afaa-0555b4074ef3 req-42a4a271-ec42-47d6-a57e-f0cb3dbd9210 service nova] Releasing lock "refresh_cache-94fbc312-96fb-4b62-adc7-18053d509eca" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 574.240061] env[62096]: DEBUG oslo_concurrency.lockutils [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Acquired lock "refresh_cache-94fbc312-96fb-4b62-adc7-18053d509eca" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 574.240246] env[62096]: DEBUG nova.network.neutron [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 574.240678] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Expecting reply to msg 3008eaa49721467db24b69ec5d1f86b3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 574.247958] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3008eaa49721467db24b69ec5d1f86b3 [ 574.720658] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.872s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 574.721530] env[62096]: ERROR nova.compute.manager [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 04bdae88-f432-4b2b-bd3c-e8b1341b7a76, please check neutron logs for more information. 
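Editor's aside on the inventory entries repeated in this excerpt (VCPU total=48, allocation_ratio=4.0; MEMORY_MB total=196590, reserved=512; DISK_GB total=400): schedulable capacity in Placement follows the general rule capacity = (total - reserved) * allocation_ratio, and the earlier "Final resource view ... used_ram=1280MB" line is consistent with the 512 MB reserved plus four 192 MB (m1.nano) instances. A small worked check, using only numbers taken from the log and that general formula (nothing read from this deployment's config):

# Capacity check for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
    print(rc, capacity)   # VCPU 192, MEMORY_MB 196078, DISK_GB 400

# 'Final resource view ... used_ram=1280MB' matches 512 MB reserved plus
# four m1.nano instances at 192 MB each:
assert 512 + 4 * 192 == 1280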
[ 574.721530] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] Traceback (most recent call last): [ 574.721530] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 574.721530] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] self.driver.spawn(context, instance, image_meta, [ 574.721530] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 574.721530] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] self._vmops.spawn(context, instance, image_meta, injected_files, [ 574.721530] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 574.721530] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] vm_ref = self.build_virtual_machine(instance, [ 574.721530] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 574.721530] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] vif_infos = vmwarevif.get_vif_info(self._session, [ 574.721530] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 574.721943] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] for vif in network_info: [ 574.721943] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 574.721943] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] return self._sync_wrapper(fn, *args, **kwargs) [ 574.721943] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 574.721943] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] self.wait() [ 574.721943] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 574.721943] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] self[:] = self._gt.wait() [ 574.721943] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 574.721943] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] return self._exit_event.wait() [ 574.721943] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 574.721943] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] result = hub.switch() [ 574.721943] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
574.721943] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] return self.greenlet.switch() [ 574.722316] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 574.722316] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] result = function(*args, **kwargs) [ 574.722316] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 574.722316] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] return func(*args, **kwargs) [ 574.722316] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 574.722316] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] raise e [ 574.722316] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 574.722316] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] nwinfo = self.network_api.allocate_for_instance( [ 574.722316] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 574.722316] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] created_port_ids = self._update_ports_for_instance( [ 574.722316] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 574.722316] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] with excutils.save_and_reraise_exception(): [ 574.722316] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 574.722692] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] self.force_reraise() [ 574.722692] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 574.722692] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] raise self.value [ 574.722692] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 574.722692] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] updated_port = self._update_port( [ 574.722692] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 574.722692] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] _ensure_no_port_binding_failure(port) [ 574.722692] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 574.722692] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] raise exception.PortBindingFailed(port_id=port['id']) [ 574.722692] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] nova.exception.PortBindingFailed: Binding failed for port 04bdae88-f432-4b2b-bd3c-e8b1341b7a76, please check neutron logs for more information. [ 574.722692] env[62096]: ERROR nova.compute.manager [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] [ 574.722996] env[62096]: DEBUG nova.compute.utils [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] Binding failed for port 04bdae88-f432-4b2b-bd3c-e8b1341b7a76, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 574.723435] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.871s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 574.724868] env[62096]: INFO nova.compute.claims [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 574.726508] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Expecting reply to msg e4a58631775f4b80b141f94dca26998c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 574.727673] env[62096]: DEBUG nova.compute.manager [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] Build of instance 9ebe9a03-3fc2-4a77-bb2b-d220e9693115 was re-scheduled: Binding failed for port 04bdae88-f432-4b2b-bd3c-e8b1341b7a76, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 574.728140] env[62096]: DEBUG nova.compute.manager [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 574.728375] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Acquiring lock "refresh_cache-9ebe9a03-3fc2-4a77-bb2b-d220e9693115" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 574.728565] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Acquired lock "refresh_cache-9ebe9a03-3fc2-4a77-bb2b-d220e9693115" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 574.728727] env[62096]: DEBUG nova.network.neutron [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 574.729090] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Expecting reply to msg e800d8224dc147a6bb2739c270074799 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 574.735672] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e800d8224dc147a6bb2739c270074799 [ 574.777474] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e4a58631775f4b80b141f94dca26998c [ 574.785232] env[62096]: DEBUG nova.network.neutron [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 574.976610] env[62096]: DEBUG nova.network.neutron [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 574.976610] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Expecting reply to msg abc9905585a74d4cbaceffc0e8e3dd38 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 574.985626] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg abc9905585a74d4cbaceffc0e8e3dd38 [ 575.236439] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Expecting reply to msg a7b82cacb252430aac140297772f8153 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 575.252458] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a7b82cacb252430aac140297772f8153 [ 575.281951] env[62096]: DEBUG oslo_concurrency.lockutils [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Acquiring lock "06b9105a-8dcb-4bff-bba2-05e179036f24" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 575.281951] env[62096]: DEBUG oslo_concurrency.lockutils [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Lock "06b9105a-8dcb-4bff-bba2-05e179036f24" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 575.479139] env[62096]: DEBUG oslo_concurrency.lockutils [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Releasing lock "refresh_cache-94fbc312-96fb-4b62-adc7-18053d509eca" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 575.479708] env[62096]: DEBUG nova.compute.manager [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 575.479799] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 575.480112] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-853f2fdd-20ff-4c77-a2b8-7ae7df8c52bd {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.489972] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5e3b5b0-fd45-4ab0-a6e5-849739a64343 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.514477] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 94fbc312-96fb-4b62-adc7-18053d509eca could not be found. [ 575.514743] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 575.514957] env[62096]: INFO nova.compute.manager [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] Took 0.04 seconds to destroy the instance on the hypervisor. [ 575.515229] env[62096]: DEBUG oslo.service.loopingcall [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 575.515459] env[62096]: DEBUG nova.compute.manager [-] [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 575.515549] env[62096]: DEBUG nova.network.neutron [-] [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 575.563648] env[62096]: DEBUG nova.network.neutron [-] [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 575.564239] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg d5590e0e64814e0b90dbbd84009b9916 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 575.579043] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d5590e0e64814e0b90dbbd84009b9916 [ 575.602864] env[62096]: DEBUG nova.network.neutron [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 575.803264] env[62096]: DEBUG nova.compute.manager [req-665bf171-c1a8-4353-a320-bab0b4557826 req-90154b2b-42c7-4031-b4c7-027af1f243de service nova] [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] Received event network-vif-deleted-3d2c2a0a-10a1-4b8d-b866-46b384c96647 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 576.072528] env[62096]: DEBUG nova.network.neutron [-] [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 576.072585] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg d2dc11e0c8094a8f9b885f787b6ee8e3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 576.090931] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d2dc11e0c8094a8f9b885f787b6ee8e3 [ 576.136310] env[62096]: DEBUG nova.network.neutron [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 576.136444] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Expecting reply to msg b7a7466fd4e346f2ad4a1be2785599cb in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 576.148423] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b7a7466fd4e346f2ad4a1be2785599cb [ 576.159819] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56ae40f6-bb4a-402c-9521-2effa025a5c1 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.168404] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2a50b5e-3f53-4ebc-9055-60819866191a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.206555] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c918e04b-4cce-44b6-b668-39786c5639d8 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.214572] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-31882e73-bc08-4c30-8b5d-158efa2a8ca7 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.236521] env[62096]: DEBUG nova.compute.provider_tree [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 576.237053] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Expecting reply to msg ea01fc77bb8e4f8abd60d16d0f59347b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 576.248768] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ea01fc77bb8e4f8abd60d16d0f59347b [ 576.575580] env[62096]: INFO nova.compute.manager [-] [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] Took 1.06 seconds to deallocate network for instance. [ 576.578257] env[62096]: DEBUG nova.compute.claims [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 576.578257] env[62096]: DEBUG oslo_concurrency.lockutils [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 576.639183] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Releasing lock "refresh_cache-9ebe9a03-3fc2-4a77-bb2b-d220e9693115" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 576.639438] env[62096]: DEBUG nova.compute.manager [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 576.639614] env[62096]: DEBUG nova.compute.manager [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 576.639777] env[62096]: DEBUG nova.network.neutron [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 576.702992] env[62096]: DEBUG nova.network.neutron [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 576.703593] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Expecting reply to msg b338df90f6aa404b96b1495dc6849f95 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 576.712781] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b338df90f6aa404b96b1495dc6849f95 [ 576.749403] env[62096]: DEBUG nova.scheduler.client.report [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 576.751674] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Expecting reply to msg 9ea961d614014d3ba4e54cd5da96e161 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 576.762881] env[62096]: DEBUG oslo_concurrency.lockutils [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Acquiring lock "c2a06721-6848-4fc4-89da-5d292853b6e9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 576.762881] env[62096]: DEBUG oslo_concurrency.lockutils [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Lock "c2a06721-6848-4fc4-89da-5d292853b6e9" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 576.777879] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9ea961d614014d3ba4e54cd5da96e161 [ 576.802629] env[62096]: DEBUG oslo_concurrency.lockutils [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Acquiring lock "f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 576.803244] env[62096]: DEBUG oslo_concurrency.lockutils [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Lock "f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 577.207257] env[62096]: DEBUG nova.network.neutron [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 577.208081] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Expecting reply to msg 591c5abeaae5440cb3aef1db4bf8c892 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 577.221854] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 591c5abeaae5440cb3aef1db4bf8c892 [ 577.254278] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.531s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 577.254785] env[62096]: DEBUG nova.compute.manager [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 577.256508] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Expecting reply to msg 0c6c2ba285c84485adb524e1aecbec76 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 577.257527] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.557s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 577.259164] env[62096]: INFO nova.compute.claims [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 577.260693] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Expecting reply to msg 86ad82c7ce204d989693a4d2b2f3d30b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 577.297056] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c6c2ba285c84485adb524e1aecbec76 [ 577.307723] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 86ad82c7ce204d989693a4d2b2f3d30b [ 577.711953] env[62096]: INFO nova.compute.manager [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] [instance: 9ebe9a03-3fc2-4a77-bb2b-d220e9693115] Took 1.07 seconds to deallocate network for instance. 
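The lockutils lines above ("Acquiring lock ... by ...", "acquired ... waited 23.557s", '"released" ... held 2.531s') are produced by oslo.concurrency's synchronized decorator, which serializes callers on a named semaphore and logs how long each caller waited for the lock and then held it. Below is a minimal sketch of that pattern, assuming only the decorator's documented behaviour; the function body and the instance UUID reused from the log entries above are purely illustrative.

from oslo_concurrency import lockutils


# Sketch only: lockutils.synchronized() is the real oslo.concurrency API
# that emits the "Acquiring lock ... / acquired ... waited ... / released
# ... held ..." DEBUG lines seen above; the function below is a stand-in
# for the resource-tracker code it wraps in Nova.
@lockutils.synchronized('compute_resources')
def instance_claim(instance_uuid):
    # Runs while holding the process-local "compute_resources" semaphore,
    # so concurrent builds update the resource tracker one at a time.
    print('claiming resources for %s' % instance_uuid)


instance_claim('1377dbd7-9025-4683-be56-6e8987a5d72e')

Because instance_claim and abort_instance_claim both take the same "compute_resources" lock (as the entries above and below show), an aborting or claiming build can queue behind other claims for many seconds, which is where the 23.557s wait logged above comes from.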
[ 577.713648] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Expecting reply to msg d38bfe377cc14a3fb588243dadc9a6f8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 577.762623] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d38bfe377cc14a3fb588243dadc9a6f8 [ 577.764455] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Expecting reply to msg 81324d0fc3214eb6bd20d061796054d7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 577.765985] env[62096]: DEBUG nova.compute.utils [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 577.766523] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Expecting reply to msg 1fc8933090b3418db97fee1c12b5f821 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 577.767591] env[62096]: DEBUG nova.compute.manager [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 577.767758] env[62096]: DEBUG nova.network.neutron [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 577.771662] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 81324d0fc3214eb6bd20d061796054d7 [ 577.779982] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1fc8933090b3418db97fee1c12b5f821 [ 577.932552] env[62096]: DEBUG nova.policy [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6951c963fea34da1ac90c7eb720a3e8f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c9af005d131c408f8cb6d4c90e8aa8c7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 578.221903] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Expecting reply to msg a3f77bc64c3242ecb50b394233acc394 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 578.271204] env[62096]: DEBUG 
nova.compute.manager [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] Start building block device mappings for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 578.273008] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Expecting reply to msg ff33788c3cd6442698cf0483fb02c005 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 578.283836] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a3f77bc64c3242ecb50b394233acc394 [ 578.316273] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ff33788c3cd6442698cf0483fb02c005 [ 578.589242] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Acquiring lock "62b778aa-71c7-480b-8148-017773246caf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 578.589493] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Lock "62b778aa-71c7-480b-8148-017773246caf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 578.674910] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ecd409d-1341-4205-b948-05488e9a6a5b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.683232] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d79144c1-9744-4cee-8f0e-ea4c87248a7b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.714048] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82816ac9-0e17-4545-af5b-4ee71dbc0eaa {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.725479] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6102bc4e-57a2-4512-9fd6-edcbe2637051 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.742241] env[62096]: DEBUG nova.compute.provider_tree [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 578.743328] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 
tempest-ListImageFiltersTestJSON-1308210824-project-member] Expecting reply to msg 91f619539cb942c18e27d72c89a63661 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 578.749641] env[62096]: INFO nova.scheduler.client.report [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Deleted allocations for instance 9ebe9a03-3fc2-4a77-bb2b-d220e9693115 [ 578.772053] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 91f619539cb942c18e27d72c89a63661 [ 578.772977] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Expecting reply to msg 54812550ad74475580176edb764f6347 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 578.782093] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Expecting reply to msg 7e00c56e2dd04ba8b5c3c7abf9f5eacf in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 578.790205] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 54812550ad74475580176edb764f6347 [ 578.837370] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7e00c56e2dd04ba8b5c3c7abf9f5eacf [ 579.245871] env[62096]: DEBUG nova.scheduler.client.report [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 579.248216] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Expecting reply to msg 05532c9c5ef54c099d29098a4ad0b309 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 579.259373] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 05532c9c5ef54c099d29098a4ad0b309 [ 579.280296] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f939c6b6-f865-40df-a5aa-fec5b14620ab tempest-InstanceActionsNegativeTestJSON-1063701282 tempest-InstanceActionsNegativeTestJSON-1063701282-project-member] Lock "9ebe9a03-3fc2-4a77-bb2b-d220e9693115" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.276s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 579.280296] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Expecting reply to msg 56a63fe621e24b1f87219b2ba6322a46 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 
579.286379] env[62096]: DEBUG nova.compute.manager [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] Start spawning the instance on the hypervisor. {{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 579.290606] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 56a63fe621e24b1f87219b2ba6322a46 [ 579.311178] env[62096]: DEBUG nova.virt.hardware [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 579.311416] env[62096]: DEBUG nova.virt.hardware [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 579.311566] env[62096]: DEBUG nova.virt.hardware [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 579.311738] env[62096]: DEBUG nova.virt.hardware [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 579.311876] env[62096]: DEBUG nova.virt.hardware [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 579.312038] env[62096]: DEBUG nova.virt.hardware [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 579.312249] env[62096]: DEBUG nova.virt.hardware [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
{{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 579.312405] env[62096]: DEBUG nova.virt.hardware [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 579.312557] env[62096]: DEBUG nova.virt.hardware [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 579.312711] env[62096]: DEBUG nova.virt.hardware [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 579.312875] env[62096]: DEBUG nova.virt.hardware [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 579.313730] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eb5818b-9df1-46bd-aa4b-2e238d7f6e5a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.322294] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e023b8c0-bb61-4655-b9b5-f7715ab9a9d5 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.412867] env[62096]: DEBUG nova.network.neutron [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] Successfully created port: b07a03a0-8320-49d6-93ef-3631d9a04a2a {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 579.750494] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.493s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 579.751016] env[62096]: DEBUG nova.compute.manager [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 579.752786] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Expecting reply to msg afb516979ee54829bfc27b1c46fda5f5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 579.753787] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.284s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 579.757467] env[62096]: INFO nova.compute.claims [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 579.757467] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Expecting reply to msg 5fcdec193d9b49f48524b888ffab9c22 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 579.782857] env[62096]: DEBUG nova.compute.manager [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 579.784334] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Expecting reply to msg 58ed499e47fd488783f5eae229edd6a6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 579.810381] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5fcdec193d9b49f48524b888ffab9c22 [ 579.823414] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg afb516979ee54829bfc27b1c46fda5f5 [ 579.834723] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 58ed499e47fd488783f5eae229edd6a6 [ 580.260877] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Expecting reply to msg d70c1530bf8f4176a06b744f2aa10754 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 580.262572] env[62096]: DEBUG nova.compute.utils [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 580.263122] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Expecting reply to msg 62dd7d9fa00b4ff391b386f1e4fafeb9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 580.264056] env[62096]: DEBUG nova.compute.manager [None 
req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 580.264218] env[62096]: DEBUG nova.network.neutron [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 580.270549] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d70c1530bf8f4176a06b744f2aa10754 [ 580.285481] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 62dd7d9fa00b4ff391b386f1e4fafeb9 [ 580.311057] env[62096]: DEBUG oslo_concurrency.lockutils [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 580.662943] env[62096]: DEBUG nova.policy [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6951c963fea34da1ac90c7eb720a3e8f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c9af005d131c408f8cb6d4c90e8aa8c7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 580.773444] env[62096]: DEBUG nova.compute.manager [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 580.773444] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Expecting reply to msg 7770d691572f40da9e6e755659251710 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 580.823902] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7770d691572f40da9e6e755659251710 [ 581.136854] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52ba2461-070b-43e6-bdd3-17c204e3f811 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.149170] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db599bc6-6c53-43bd-9103-662e3eca6db7 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.181383] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-248eb4b9-30cf-4cfa-b1d3-cfb2c2e989ea {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.188971] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1958467-6889-48d5-8f42-c6ad13920b1c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.206713] env[62096]: DEBUG nova.compute.provider_tree [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 581.206825] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Expecting reply to msg d4f105e47a53413fa239a644b9788622 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 581.218304] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d4f105e47a53413fa239a644b9788622 [ 581.277593] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Expecting reply to msg 3b0cbac6392b4a8b81a876f31e5e81c2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 581.318964] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3b0cbac6392b4a8b81a876f31e5e81c2 [ 581.653930] env[62096]: DEBUG nova.network.neutron [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] Successfully created port: f73e7926-66c6-4049-9ef5-d36a5d8654a0 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 581.710586] env[62096]: DEBUG nova.scheduler.client.report [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Inventory has not changed for provider 
6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 581.712957] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Expecting reply to msg 3011d969fce642bf898fcfaff5cd1451 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 581.728177] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3011d969fce642bf898fcfaff5cd1451 [ 581.781066] env[62096]: DEBUG nova.compute.manager [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] Start spawning the instance on the hypervisor. {{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 581.806920] env[62096]: DEBUG nova.virt.hardware [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 581.807361] env[62096]: DEBUG nova.virt.hardware [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 581.807450] env[62096]: DEBUG nova.virt.hardware [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 581.807606] env[62096]: DEBUG nova.virt.hardware [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 581.807788] env[62096]: DEBUG nova.virt.hardware [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] 
Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 581.807981] env[62096]: DEBUG nova.virt.hardware [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 581.808229] env[62096]: DEBUG nova.virt.hardware [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 581.808412] env[62096]: DEBUG nova.virt.hardware [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 581.808700] env[62096]: DEBUG nova.virt.hardware [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 581.808844] env[62096]: DEBUG nova.virt.hardware [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 581.808950] env[62096]: DEBUG nova.virt.hardware [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 581.809899] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-192db3b5-dfa6-4ff5-aa02-f532bf8ecc39 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.818363] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d672655-c038-493a-b25b-4a6930d0c65b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.216601] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.462s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 582.216601] env[62096]: DEBUG nova.compute.manager [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 582.217938] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Expecting reply to msg 6d366c26b0814a7489db823b8c7316d6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 582.219007] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 18.662s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 582.220850] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Expecting reply to msg 2a68c19b0cf24649b24c3893807b6b02 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 582.263621] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6d366c26b0814a7489db823b8c7316d6 [ 582.264792] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2a68c19b0cf24649b24c3893807b6b02 [ 582.343477] env[62096]: DEBUG oslo_concurrency.lockutils [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Acquiring lock "e5c75298-b9ef-4e28-a038-b55d8e198539" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 582.343727] env[62096]: DEBUG oslo_concurrency.lockutils [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Lock "e5c75298-b9ef-4e28-a038-b55d8e198539" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 582.729117] env[62096]: DEBUG nova.compute.utils [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 582.730172] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Expecting reply to msg 3edaae69f20f4a3f9f890dcc39f142d4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 582.732869] env[62096]: DEBUG nova.compute.manager [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 582.733153] env[62096]: DEBUG nova.network.neutron [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 582.748228] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3edaae69f20f4a3f9f890dcc39f142d4 [ 582.918656] env[62096]: DEBUG nova.policy [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a4cd009d007949a6a0dadc6a85577e6e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f7d8f0450d084f9b984bcdf2e1cdf695', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 583.046343] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Acquiring lock "641e5b3a-15ef-4d78-8339-7a26494038d9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 583.046576] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Lock "641e5b3a-15ef-4d78-8339-7a26494038d9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 583.153579] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aeb44cd-6ace-400c-b1f3-32b1488cbeec {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.163679] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6870513-9ebe-4f28-99a4-a127f425565d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.219239] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14666feb-211a-4a7a-9070-6f0bb4c4f9f3 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.229808] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8ec4d89-4ba4-40d3-bdb7-7a81027ea733 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.236049] env[62096]: DEBUG nova.compute.manager [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 
09410bd9-aa4e-49a8-86fb-8058b842bd72] Start building block device mappings for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 583.237773] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Expecting reply to msg 97a6fa28d6db402ea9c3897c6f569d9f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 583.262544] env[62096]: DEBUG nova.compute.provider_tree [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 583.263311] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Expecting reply to msg 9476e0ec265a4007b639ab1b447aa06b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 583.274266] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 97a6fa28d6db402ea9c3897c6f569d9f [ 583.282720] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9476e0ec265a4007b639ab1b447aa06b [ 583.647927] env[62096]: ERROR nova.compute.manager [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b07a03a0-8320-49d6-93ef-3631d9a04a2a, please check neutron logs for more information. 
[ 583.647927] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 583.647927] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 583.647927] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 583.647927] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 583.647927] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 583.647927] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 583.647927] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 583.647927] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 583.647927] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 583.647927] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 583.647927] env[62096]: ERROR nova.compute.manager raise self.value [ 583.647927] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 583.647927] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 583.647927] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 583.647927] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 583.648709] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 583.648709] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 583.648709] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b07a03a0-8320-49d6-93ef-3631d9a04a2a, please check neutron logs for more information. 
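The traceback above bottoms out in /opt/stack/nova/nova/network/neutron.py, where _ensure_no_port_binding_failure raises exception.PortBindingFailed(port_id=port['id']) and _update_ports_for_instance re-raises it via save_and_reraise_exception, aborting network allocation for the instance. A minimal, self-contained sketch of that guard follows; the log only shows the raise site, so the binding:vif_type == 'binding_failed' check and the stand-in exception class are assumptions, not quotes from the Nova source.

# Hypothetical, self-contained sketch of the guard at the bottom of the
# traceback above (in Nova the exception is nova.exception.PortBindingFailed).

class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed."""

    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")


def _ensure_no_port_binding_failure(port):
    # Assumption: Neutron marks a port its mechanism drivers could not bind
    # with binding:vif_type == 'binding_failed'; the guard turns that into an
    # exception so the caller can re-raise it and fail the build.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


# Usage mirroring the log: a port dict whose binding failed triggers the error.
try:
    _ensure_no_port_binding_failure(
        {'id': 'b07a03a0-8320-49d6-93ef-3631d9a04a2a',
         'binding:vif_type': 'binding_failed'})
except PortBindingFailed as exc:
    print(exc)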
[ 583.648709] env[62096]: ERROR nova.compute.manager [ 583.648709] env[62096]: Traceback (most recent call last): [ 583.648709] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 583.648709] env[62096]: listener.cb(fileno) [ 583.648709] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 583.648709] env[62096]: result = function(*args, **kwargs) [ 583.648709] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 583.648709] env[62096]: return func(*args, **kwargs) [ 583.648709] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 583.648709] env[62096]: raise e [ 583.648709] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 583.648709] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 583.648709] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 583.648709] env[62096]: created_port_ids = self._update_ports_for_instance( [ 583.648709] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 583.648709] env[62096]: with excutils.save_and_reraise_exception(): [ 583.648709] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 583.648709] env[62096]: self.force_reraise() [ 583.648709] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 583.648709] env[62096]: raise self.value [ 583.648709] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 583.648709] env[62096]: updated_port = self._update_port( [ 583.648709] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 583.648709] env[62096]: _ensure_no_port_binding_failure(port) [ 583.648709] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 583.648709] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 583.649621] env[62096]: nova.exception.PortBindingFailed: Binding failed for port b07a03a0-8320-49d6-93ef-3631d9a04a2a, please check neutron logs for more information. [ 583.649621] env[62096]: Removing descriptor: 19 [ 583.649621] env[62096]: ERROR nova.compute.manager [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b07a03a0-8320-49d6-93ef-3631d9a04a2a, please check neutron logs for more information. 
[ 583.649621] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] Traceback (most recent call last): [ 583.649621] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 583.649621] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] yield resources [ 583.649621] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 583.649621] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] self.driver.spawn(context, instance, image_meta, [ 583.649621] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 583.649621] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 583.649621] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 583.649621] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] vm_ref = self.build_virtual_machine(instance, [ 583.649959] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 583.649959] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] vif_infos = vmwarevif.get_vif_info(self._session, [ 583.649959] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 583.649959] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] for vif in network_info: [ 583.649959] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 583.649959] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] return self._sync_wrapper(fn, *args, **kwargs) [ 583.649959] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 583.649959] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] self.wait() [ 583.649959] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 583.649959] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] self[:] = self._gt.wait() [ 583.649959] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 583.649959] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] return self._exit_event.wait() [ 583.649959] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 583.650357] env[62096]: ERROR 
nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] result = hub.switch() [ 583.650357] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 583.650357] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] return self.greenlet.switch() [ 583.650357] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 583.650357] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] result = function(*args, **kwargs) [ 583.650357] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 583.650357] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] return func(*args, **kwargs) [ 583.650357] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 583.650357] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] raise e [ 583.650357] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 583.650357] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] nwinfo = self.network_api.allocate_for_instance( [ 583.650357] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 583.650357] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] created_port_ids = self._update_ports_for_instance( [ 583.650782] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 583.650782] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] with excutils.save_and_reraise_exception(): [ 583.650782] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 583.650782] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] self.force_reraise() [ 583.650782] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 583.650782] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] raise self.value [ 583.650782] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 583.650782] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] updated_port = self._update_port( [ 583.650782] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 583.650782] 
env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] _ensure_no_port_binding_failure(port) [ 583.650782] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 583.650782] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] raise exception.PortBindingFailed(port_id=port['id']) [ 583.651120] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] nova.exception.PortBindingFailed: Binding failed for port b07a03a0-8320-49d6-93ef-3631d9a04a2a, please check neutron logs for more information. [ 583.651120] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] [ 583.651120] env[62096]: INFO nova.compute.manager [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] Terminating instance [ 583.654243] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Acquiring lock "refresh_cache-fcdeb1ed-7f21-4338-8964-63d16d275bc2" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 583.654243] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Acquired lock "refresh_cache-fcdeb1ed-7f21-4338-8964-63d16d275bc2" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 583.654243] env[62096]: DEBUG nova.network.neutron [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 583.654243] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Expecting reply to msg 861b70b5cbb4486f9279a8549f4f22b5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 583.661651] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 861b70b5cbb4486f9279a8549f4f22b5 [ 583.768773] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Expecting reply to msg 8913fe1a1aba4aa5bf8524ebd8a9d714 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 583.768773] env[62096]: DEBUG nova.scheduler.client.report [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 
'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 583.772309] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Expecting reply to msg b93fa6c7e9394dbeaab501ef3452858a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 583.785677] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b93fa6c7e9394dbeaab501ef3452858a [ 583.820087] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8913fe1a1aba4aa5bf8524ebd8a9d714 [ 583.927691] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Acquiring lock "374ca884-8e77-4568-8667-e124e6df4c75" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 583.927923] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Lock "374ca884-8e77-4568-8667-e124e6df4c75" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 584.023297] env[62096]: DEBUG nova.network.neutron [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] Successfully created port: 6385cb29-db63-485a-90be-0c7ea8677787 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 584.184712] env[62096]: DEBUG nova.network.neutron [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 584.275300] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.056s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 584.275933] env[62096]: ERROR nova.compute.manager [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 0198948c-99f9-4d41-9309-650d6ba5c5cc, please check neutron logs for more information. 
[ 584.275933] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] Traceback (most recent call last): [ 584.275933] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 584.275933] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] self.driver.spawn(context, instance, image_meta, [ 584.275933] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 584.275933] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 584.275933] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 584.275933] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] vm_ref = self.build_virtual_machine(instance, [ 584.275933] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 584.275933] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] vif_infos = vmwarevif.get_vif_info(self._session, [ 584.275933] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 584.276336] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] for vif in network_info: [ 584.276336] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 584.276336] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] return self._sync_wrapper(fn, *args, **kwargs) [ 584.276336] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 584.276336] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] self.wait() [ 584.276336] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 584.276336] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] self[:] = self._gt.wait() [ 584.276336] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 584.276336] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] return self._exit_event.wait() [ 584.276336] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 584.276336] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] result = hub.switch() [ 584.276336] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
584.276336] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] return self.greenlet.switch() [ 584.276746] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 584.276746] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] result = function(*args, **kwargs) [ 584.276746] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 584.276746] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] return func(*args, **kwargs) [ 584.276746] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 584.276746] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] raise e [ 584.276746] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 584.276746] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] nwinfo = self.network_api.allocate_for_instance( [ 584.276746] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 584.276746] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] created_port_ids = self._update_ports_for_instance( [ 584.276746] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 584.276746] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] with excutils.save_and_reraise_exception(): [ 584.276746] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 584.277173] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] self.force_reraise() [ 584.277173] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 584.277173] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] raise self.value [ 584.277173] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 584.277173] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] updated_port = self._update_port( [ 584.277173] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 584.277173] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] _ensure_no_port_binding_failure(port) [ 584.277173] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 584.277173] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] raise exception.PortBindingFailed(port_id=port['id']) [ 584.277173] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] nova.exception.PortBindingFailed: Binding failed for port 0198948c-99f9-4d41-9309-650d6ba5c5cc, please check neutron logs for more information. [ 584.277173] env[62096]: ERROR nova.compute.manager [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] [ 584.277516] env[62096]: DEBUG nova.compute.utils [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] Binding failed for port 0198948c-99f9-4d41-9309-650d6ba5c5cc, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 584.278532] env[62096]: DEBUG nova.compute.manager [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] Start spawning the instance on the hypervisor. {{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 584.280620] env[62096]: DEBUG oslo_concurrency.lockutils [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.350s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 584.282594] env[62096]: INFO nova.compute.claims [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 584.284769] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Expecting reply to msg edf91964268e47048fed558e98938793 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 584.287150] env[62096]: DEBUG nova.compute.manager [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] Build of instance 773f55a5-d40c-445e-b02a-268d4f88fdd7 was re-scheduled: Binding failed for port 0198948c-99f9-4d41-9309-650d6ba5c5cc, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 584.287310] env[62096]: DEBUG nova.compute.manager [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 584.287390] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Acquiring lock "refresh_cache-773f55a5-d40c-445e-b02a-268d4f88fdd7" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 584.287532] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Acquired lock "refresh_cache-773f55a5-d40c-445e-b02a-268d4f88fdd7" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 584.287685] env[62096]: DEBUG nova.network.neutron [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 584.288154] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Expecting reply to msg 7a144d42ff5a4a1da0730c91c635d349 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 584.297704] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7a144d42ff5a4a1da0730c91c635d349 [ 584.308439] env[62096]: DEBUG nova.virt.hardware [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 584.308694] env[62096]: DEBUG nova.virt.hardware [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 584.308861] env[62096]: DEBUG nova.virt.hardware [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 
tempest-ServersAdminTestJSON-915424119-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 584.309038] env[62096]: DEBUG nova.virt.hardware [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 584.309179] env[62096]: DEBUG nova.virt.hardware [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 584.309323] env[62096]: DEBUG nova.virt.hardware [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 584.309553] env[62096]: DEBUG nova.virt.hardware [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 584.309757] env[62096]: DEBUG nova.virt.hardware [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 584.309933] env[62096]: DEBUG nova.virt.hardware [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 584.310089] env[62096]: DEBUG nova.virt.hardware [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 584.310252] env[62096]: DEBUG nova.virt.hardware [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 584.311471] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39154408-b54d-4028-8879-877f8f63dd2d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.324808] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-326e851c-8e5e-4381-afac-d6a27a57bbeb {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.343046] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg edf91964268e47048fed558e98938793 [ 584.360656] 
env[62096]: DEBUG nova.network.neutron [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 584.361759] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Expecting reply to msg 623471c974a44f16b346b3fc8071a159 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 584.372741] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 623471c974a44f16b346b3fc8071a159 [ 584.604317] env[62096]: ERROR nova.compute.manager [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f73e7926-66c6-4049-9ef5-d36a5d8654a0, please check neutron logs for more information. [ 584.604317] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 584.604317] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 584.604317] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 584.604317] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 584.604317] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 584.604317] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 584.604317] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 584.604317] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 584.604317] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 584.604317] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 584.604317] env[62096]: ERROR nova.compute.manager raise self.value [ 584.604317] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 584.604317] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 584.604317] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 584.604317] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 584.605158] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 584.605158] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 584.605158] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f73e7926-66c6-4049-9ef5-d36a5d8654a0, please check neutron logs for more information. 
[ 584.605158] env[62096]: ERROR nova.compute.manager [ 584.605158] env[62096]: Traceback (most recent call last): [ 584.605158] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 584.605158] env[62096]: listener.cb(fileno) [ 584.605158] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 584.605158] env[62096]: result = function(*args, **kwargs) [ 584.605158] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 584.605158] env[62096]: return func(*args, **kwargs) [ 584.605158] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 584.605158] env[62096]: raise e [ 584.605158] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 584.605158] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 584.605158] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 584.605158] env[62096]: created_port_ids = self._update_ports_for_instance( [ 584.605158] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 584.605158] env[62096]: with excutils.save_and_reraise_exception(): [ 584.605158] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 584.605158] env[62096]: self.force_reraise() [ 584.605158] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 584.605158] env[62096]: raise self.value [ 584.605158] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 584.605158] env[62096]: updated_port = self._update_port( [ 584.605158] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 584.605158] env[62096]: _ensure_no_port_binding_failure(port) [ 584.605158] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 584.605158] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 584.606225] env[62096]: nova.exception.PortBindingFailed: Binding failed for port f73e7926-66c6-4049-9ef5-d36a5d8654a0, please check neutron logs for more information. [ 584.606225] env[62096]: Removing descriptor: 14 [ 584.606225] env[62096]: ERROR nova.compute.manager [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f73e7926-66c6-4049-9ef5-d36a5d8654a0, please check neutron logs for more information. 
[ 584.606225] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] Traceback (most recent call last): [ 584.606225] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 584.606225] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] yield resources [ 584.606225] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 584.606225] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] self.driver.spawn(context, instance, image_meta, [ 584.606225] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 584.606225] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 584.606225] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 584.606225] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] vm_ref = self.build_virtual_machine(instance, [ 584.606646] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 584.606646] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] vif_infos = vmwarevif.get_vif_info(self._session, [ 584.606646] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 584.606646] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] for vif in network_info: [ 584.606646] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 584.606646] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] return self._sync_wrapper(fn, *args, **kwargs) [ 584.606646] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 584.606646] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] self.wait() [ 584.606646] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 584.606646] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] self[:] = self._gt.wait() [ 584.606646] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 584.606646] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] return self._exit_event.wait() [ 584.606646] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 584.607658] env[62096]: ERROR 
nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] result = hub.switch() [ 584.607658] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 584.607658] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] return self.greenlet.switch() [ 584.607658] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 584.607658] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] result = function(*args, **kwargs) [ 584.607658] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 584.607658] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] return func(*args, **kwargs) [ 584.607658] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 584.607658] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] raise e [ 584.607658] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 584.607658] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] nwinfo = self.network_api.allocate_for_instance( [ 584.607658] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 584.607658] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] created_port_ids = self._update_ports_for_instance( [ 584.608156] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 584.608156] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] with excutils.save_and_reraise_exception(): [ 584.608156] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 584.608156] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] self.force_reraise() [ 584.608156] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 584.608156] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] raise self.value [ 584.608156] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 584.608156] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] updated_port = self._update_port( [ 584.608156] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 584.608156] 
env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] _ensure_no_port_binding_failure(port) [ 584.608156] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 584.608156] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] raise exception.PortBindingFailed(port_id=port['id']) [ 584.608521] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] nova.exception.PortBindingFailed: Binding failed for port f73e7926-66c6-4049-9ef5-d36a5d8654a0, please check neutron logs for more information. [ 584.608521] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] [ 584.608521] env[62096]: INFO nova.compute.manager [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] Terminating instance [ 584.608521] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Acquiring lock "refresh_cache-1377dbd7-9025-4683-be56-6e8987a5d72e" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 584.608521] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Acquired lock "refresh_cache-1377dbd7-9025-4683-be56-6e8987a5d72e" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 584.608521] env[62096]: DEBUG nova.network.neutron [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 584.608729] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Expecting reply to msg eb16ed9d6e8c4a2d99703503e2ee9d1a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 584.615323] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eb16ed9d6e8c4a2d99703503e2ee9d1a [ 584.788746] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Expecting reply to msg cdde1c4385714ef99a41a4e558f3e743 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 584.804064] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cdde1c4385714ef99a41a4e558f3e743 [ 584.818640] env[62096]: DEBUG oslo_concurrency.lockutils [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Acquiring lock "575e00ca-0f97-42c5-9e4d-706c21453210" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 584.819227] env[62096]: DEBUG oslo_concurrency.lockutils [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Lock "575e00ca-0f97-42c5-9e4d-706c21453210" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 584.843324] env[62096]: DEBUG nova.network.neutron [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 584.869021] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Releasing lock "refresh_cache-fcdeb1ed-7f21-4338-8964-63d16d275bc2" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 584.869021] env[62096]: DEBUG nova.compute.manager [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 584.869021] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 584.869021] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b975da1d-7535-4412-b9b1-0b4c5f9b78ff {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.876632] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b9b96e7-ca18-4e8d-9cc3-4be2b09adc78 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.907880] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance fcdeb1ed-7f21-4338-8964-63d16d275bc2 could not be found. 
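The WARNING above, together with the following "Instance destroyed" and "Took 0.04 seconds to destroy the instance on the hypervisor" entries, shows the destroy path tolerating a VM that never reached the backend: the port binding failed before spawn, so the vCenter lookup reports the instance as missing and the driver logs a warning and carries on. A rough sketch of that pattern is below, with hypothetical callables standing in for the vCenter lookup and teardown calls; only the tolerate-and-log behaviour is taken from the log.

# Hypothetical sketch of the tolerant-destroy pattern visible around this
# point in the log; names other than the logged message are illustrative.
import logging

LOG = logging.getLogger(__name__)


class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound."""


def destroy_instance(instance_uuid, find_vm_ref, destroy_vm):
    """Destroy a VM, treating a missing backend VM as already destroyed.

    find_vm_ref and destroy_vm are hypothetical callables standing in for the
    vCenter calls (e.g. SearchIndex.FindAllByUuid) the surrounding log shows.
    """
    try:
        vm_ref = find_vm_ref(instance_uuid)
        destroy_vm(vm_ref)
    except InstanceNotFound:
        # Nothing was ever created on the hypervisor (the build failed during
        # network allocation), so there is nothing to tear down.
        LOG.warning("Instance does not exist on backend: %s", instance_uuid)


# Usage: a lookup that raises InstanceNotFound is absorbed as a warning.
def _missing(_uuid):
    raise InstanceNotFound()


destroy_instance('fcdeb1ed-7f21-4338-8964-63d16d275bc2',
                 _missing, lambda ref: None)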
[ 584.908134] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 584.908321] env[62096]: INFO nova.compute.manager [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] Took 0.04 seconds to destroy the instance on the hypervisor. [ 584.908570] env[62096]: DEBUG oslo.service.loopingcall [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 584.908781] env[62096]: DEBUG nova.compute.manager [-] [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 584.908869] env[62096]: DEBUG nova.network.neutron [-] [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 584.943698] env[62096]: DEBUG nova.network.neutron [-] [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 584.944431] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c1bdb7e3039a4a7cb4ff8d05f307136e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 584.952772] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c1bdb7e3039a4a7cb4ff8d05f307136e [ 585.051097] env[62096]: DEBUG nova.network.neutron [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 585.052289] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Expecting reply to msg fdc01c0407074e67a096ef78b462e662 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 585.060861] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fdc01c0407074e67a096ef78b462e662 [ 585.130131] env[62096]: DEBUG nova.network.neutron [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 585.249779] env[62096]: DEBUG nova.network.neutron [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 585.249779] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Expecting reply to msg 21f640276d1f418490303fc251da62ef in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 585.259145] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 21f640276d1f418490303fc251da62ef [ 585.306466] env[62096]: DEBUG nova.compute.manager [req-6ed15dce-d568-4236-ace0-d4872274ef81 req-5a32c0b1-66b6-4f0f-b7d7-569eacd8c830 service nova] [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] Received event network-changed-b07a03a0-8320-49d6-93ef-3631d9a04a2a {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 585.306466] env[62096]: DEBUG nova.compute.manager [req-6ed15dce-d568-4236-ace0-d4872274ef81 req-5a32c0b1-66b6-4f0f-b7d7-569eacd8c830 service nova] [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] Refreshing instance network info cache due to event network-changed-b07a03a0-8320-49d6-93ef-3631d9a04a2a. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 585.306466] env[62096]: DEBUG oslo_concurrency.lockutils [req-6ed15dce-d568-4236-ace0-d4872274ef81 req-5a32c0b1-66b6-4f0f-b7d7-569eacd8c830 service nova] Acquiring lock "refresh_cache-fcdeb1ed-7f21-4338-8964-63d16d275bc2" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 585.306466] env[62096]: DEBUG oslo_concurrency.lockutils [req-6ed15dce-d568-4236-ace0-d4872274ef81 req-5a32c0b1-66b6-4f0f-b7d7-569eacd8c830 service nova] Acquired lock "refresh_cache-fcdeb1ed-7f21-4338-8964-63d16d275bc2" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 585.306641] env[62096]: DEBUG nova.network.neutron [req-6ed15dce-d568-4236-ace0-d4872274ef81 req-5a32c0b1-66b6-4f0f-b7d7-569eacd8c830 service nova] [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] Refreshing network info cache for port b07a03a0-8320-49d6-93ef-3631d9a04a2a {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 585.307067] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-6ed15dce-d568-4236-ace0-d4872274ef81 req-5a32c0b1-66b6-4f0f-b7d7-569eacd8c830 service nova] Expecting reply to msg 87cab3c88c9943c9afc53a50676aec69 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 585.324742] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 87cab3c88c9943c9afc53a50676aec69 [ 585.446451] env[62096]: DEBUG nova.network.neutron [-] [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 585.446919] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg a7bc980264bf4cd0944ceaec32be2c18 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 585.458623] env[62096]: 
INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a7bc980264bf4cd0944ceaec32be2c18 [ 585.555282] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Releasing lock "refresh_cache-773f55a5-d40c-445e-b02a-268d4f88fdd7" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 585.555528] env[62096]: DEBUG nova.compute.manager [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 585.555714] env[62096]: DEBUG nova.compute.manager [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 585.555876] env[62096]: DEBUG nova.network.neutron [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 585.590798] env[62096]: DEBUG nova.network.neutron [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 585.591436] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Expecting reply to msg 2a2e02ac0fcb489fa26bae36b8ff81c9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 585.599549] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2a2e02ac0fcb489fa26bae36b8ff81c9 [ 585.699686] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae0c0c45-c0ed-438f-96b3-77057db0eedc {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.714529] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f2b32b2-98f3-4859-a8bc-981cbc22b725 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.758986] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Releasing lock "refresh_cache-1377dbd7-9025-4683-be56-6e8987a5d72e" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 585.759401] env[62096]: DEBUG nova.compute.manager [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 585.759586] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 585.760099] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-08b10892-3d82-4b1c-8c08-d69eb193189f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.762513] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3991b86-1b47-44d0-a00c-071741a6199f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.771435] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c02d8b0-5642-41c1-9d86-fa4adcf15227 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.784246] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a1878f1-454f-4029-b1d6-46cde8264f59 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.790466] env[62096]: DEBUG nova.compute.manager [req-ae27e6fd-5010-4da8-a7d3-64c8fb264813 req-2227cbec-c97f-4ecf-b365-6da1cbd122e8 service nova] [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] Received event network-changed-f73e7926-66c6-4049-9ef5-d36a5d8654a0 {{(pid=62096) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 585.790811] env[62096]: DEBUG nova.compute.manager [req-ae27e6fd-5010-4da8-a7d3-64c8fb264813 req-2227cbec-c97f-4ecf-b365-6da1cbd122e8 service nova] [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] Refreshing instance network info cache due to event network-changed-f73e7926-66c6-4049-9ef5-d36a5d8654a0. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 585.790944] env[62096]: DEBUG oslo_concurrency.lockutils [req-ae27e6fd-5010-4da8-a7d3-64c8fb264813 req-2227cbec-c97f-4ecf-b365-6da1cbd122e8 service nova] Acquiring lock "refresh_cache-1377dbd7-9025-4683-be56-6e8987a5d72e" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 585.791084] env[62096]: DEBUG oslo_concurrency.lockutils [req-ae27e6fd-5010-4da8-a7d3-64c8fb264813 req-2227cbec-c97f-4ecf-b365-6da1cbd122e8 service nova] Acquired lock "refresh_cache-1377dbd7-9025-4683-be56-6e8987a5d72e" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 585.791293] env[62096]: DEBUG nova.network.neutron [req-ae27e6fd-5010-4da8-a7d3-64c8fb264813 req-2227cbec-c97f-4ecf-b365-6da1cbd122e8 service nova] [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] Refreshing network info cache for port f73e7926-66c6-4049-9ef5-d36a5d8654a0 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 585.791645] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-ae27e6fd-5010-4da8-a7d3-64c8fb264813 req-2227cbec-c97f-4ecf-b365-6da1cbd122e8 service nova] Expecting reply to msg 93976a123b734d789d79e28e14e74904 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 585.803909] env[62096]: DEBUG nova.compute.provider_tree [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 585.804470] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Expecting reply to msg 7ec89369ca3348ffae564e3ca0eee6a8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 585.805495] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 93976a123b734d789d79e28e14e74904 [ 585.812204] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1377dbd7-9025-4683-be56-6e8987a5d72e could not be found. 
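Both vmops WARNINGs in this stretch (for fcdeb1ed-... and 1377dbd7-...) show the destroy path treating nova.exception.InstanceNotFound as "already gone": the warning is logged, the instance is reported destroyed, and network deallocation proceeds anyway. A minimal standalone sketch of that tolerant-destroy pattern; InstanceNotFound and find_vm_by_uuid here are stand-ins, not Nova's real classes or helpers:

    # Standalone sketch of the "not found on backend == already destroyed"
    # handling visible in the WARNING lines above.
    import logging

    LOG = logging.getLogger(__name__)


    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound."""


    def find_vm_by_uuid(uuid):
        # Placeholder backend lookup; here the VM is never found, which is
        # the situation logged above for both instances.
        raise InstanceNotFound('Instance %s could not be found.' % uuid)


    def destroy_instance(uuid):
        try:
            vm_ref = find_vm_by_uuid(uuid)
        except InstanceNotFound:
            # Nothing to tear down on the hypervisor; warn and carry on so
            # network deallocation and claim cleanup can still happen.
            LOG.warning('Instance does not exist on backend: %s', uuid)
            return
        # ... real teardown of vm_ref would go here ...


    if __name__ == '__main__':
        logging.basicConfig(level=logging.DEBUG)
        destroy_instance('1377dbd7-9025-4683-be56-6e8987a5d72e')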
[ 585.812204] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 585.812479] env[62096]: INFO nova.compute.manager [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] Took 0.05 seconds to destroy the instance on the hypervisor. [ 585.812602] env[62096]: DEBUG oslo.service.loopingcall [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 585.813167] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ec89369ca3348ffae564e3ca0eee6a8 [ 585.815723] env[62096]: DEBUG nova.compute.manager [-] [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 585.815846] env[62096]: DEBUG nova.network.neutron [-] [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 585.837842] env[62096]: ERROR nova.scheduler.client.report [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] [req-e33e4e4f-bd7e-4e44-81b6-3267d11f3b0d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 6eefe13c-ab55-4c03-987f-47a62756c3b3. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-e33e4e4f-bd7e-4e44-81b6-3267d11f3b0d"}]} [ 585.856512] env[62096]: DEBUG nova.scheduler.client.report [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Refreshing inventories for resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 585.861727] env[62096]: DEBUG nova.network.neutron [req-6ed15dce-d568-4236-ace0-d4872274ef81 req-5a32c0b1-66b6-4f0f-b7d7-569eacd8c830 service nova] [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 585.872208] env[62096]: DEBUG nova.network.neutron [-] [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 585.872827] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg da8c01f5910f4939af5034933524a35c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 585.888341] env[62096]: DEBUG nova.scheduler.client.report [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Updating ProviderTree inventory for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 585.888750] env[62096]: DEBUG nova.compute.provider_tree [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 585.891504] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg da8c01f5910f4939af5034933524a35c [ 585.917026] env[62096]: DEBUG nova.scheduler.client.report [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Refreshing aggregate associations for resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3, aggregates: None {{(pid=62096) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 585.936568] env[62096]: DEBUG nova.scheduler.client.report [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Refreshing trait associations for resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62096) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 585.949648] env[62096]: INFO nova.compute.manager [-] [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] Took 1.04 seconds to deallocate network for instance. 
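The 409 from Placement above ("placement.concurrent_update", resource provider generation conflict) indicates the inventory update was sent with a stale provider generation; the report client reacts by refreshing the provider's inventories, aggregates and traits and retrying, which is why the generation later advances from 43 to 44. A minimal sketch of that refresh-and-retry loop under optimistic concurrency, with hypothetical put_inventory/get_provider_generation callables standing in for the real Placement client calls:

    # Sketch of the refresh-and-retry loop implied by the 409
    # "placement.concurrent_update" error above. put_inventory and
    # get_provider_generation are hypothetical stand-ins.
    class GenerationConflict(Exception):
        """Stand-in for a 409 placement.concurrent_update response."""


    def update_inventory_with_retry(provider_uuid, inventory,
                                    put_inventory, get_provider_generation,
                                    max_attempts=4):
        generation = get_provider_generation(provider_uuid)
        for _ in range(max_attempts):
            try:
                # Each write carries the generation we believe is current.
                return put_inventory(provider_uuid, generation, inventory)
            except GenerationConflict:
                # Someone else updated the provider first; refresh our view
                # of the generation and try again (43 -> 44 in the log).
                generation = get_provider_generation(provider_uuid)
        raise RuntimeError('could not update inventory for %s' % provider_uuid)


    # Inventory shape copied from the log above (VCPU entry only).
    EXAMPLE_INVENTORY = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
    }


    if __name__ == '__main__':
        state = {'generation': 42, 'raced': False}

        def fake_get(uuid):
            return state['generation']

        def fake_put(uuid, generation, inventory):
            if not state['raced']:
                # Simulate a concurrent update landing first, as in the log.
                state['generation'] += 1
                state['raced'] = True
            if generation != state['generation']:
                raise GenerationConflict()
            state['generation'] += 1
            return state['generation']

        print(update_inventory_with_retry(
            '6eefe13c-ab55-4c03-987f-47a62756c3b3',
            EXAMPLE_INVENTORY, fake_put, fake_get))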
[ 585.952025] env[62096]: DEBUG nova.compute.claims [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 585.952232] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 586.019430] env[62096]: DEBUG oslo_concurrency.lockutils [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Acquiring lock "1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 586.019663] env[62096]: DEBUG oslo_concurrency.lockutils [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Lock "1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 586.024155] env[62096]: DEBUG nova.network.neutron [req-6ed15dce-d568-4236-ace0-d4872274ef81 req-5a32c0b1-66b6-4f0f-b7d7-569eacd8c830 service nova] [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 586.024567] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-6ed15dce-d568-4236-ace0-d4872274ef81 req-5a32c0b1-66b6-4f0f-b7d7-569eacd8c830 service nova] Expecting reply to msg 90b9bbe2f6484e9aa376eeae6af607c9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 586.036416] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 90b9bbe2f6484e9aa376eeae6af607c9 [ 586.093629] env[62096]: DEBUG nova.network.neutron [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 586.094151] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Expecting reply to msg a9c154eaebf5466893ea958314be21c2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 586.103601] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a9c154eaebf5466893ea958314be21c2 [ 586.378558] env[62096]: DEBUG nova.network.neutron [req-ae27e6fd-5010-4da8-a7d3-64c8fb264813 req-2227cbec-c97f-4ecf-b365-6da1cbd122e8 service nova] [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 586.378558] env[62096]: DEBUG nova.network.neutron [-] [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 586.378558] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 31fe0f2614014c009a872fe9eb78bdec in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 586.386365] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 31fe0f2614014c009a872fe9eb78bdec [ 586.415586] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0cd98a3-ead4-4b29-820b-551c1e459de9 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.431625] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-709a0fa0-0137-4ad3-9b63-32f89eed5864 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.488615] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-612900ec-7a50-4cc6-8712-4d733a35acf8 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.491956] env[62096]: DEBUG nova.network.neutron [req-ae27e6fd-5010-4da8-a7d3-64c8fb264813 req-2227cbec-c97f-4ecf-b365-6da1cbd122e8 service nova] [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 586.492153] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-ae27e6fd-5010-4da8-a7d3-64c8fb264813 req-2227cbec-c97f-4ecf-b365-6da1cbd122e8 service nova] Expecting reply to msg 21f25e094dd84f94b840a5d5770bd01b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 586.500654] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d7e528f-d6f6-4f17-ae36-61104aa8c16b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.507718] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 21f25e094dd84f94b840a5d5770bd01b [ 586.517997] env[62096]: DEBUG nova.compute.provider_tree [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 586.518378] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Expecting reply to msg 773084a00a8a4d2c8da6fe242652fc50 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 
586.526080] env[62096]: DEBUG oslo_concurrency.lockutils [req-6ed15dce-d568-4236-ace0-d4872274ef81 req-5a32c0b1-66b6-4f0f-b7d7-569eacd8c830 service nova] Releasing lock "refresh_cache-fcdeb1ed-7f21-4338-8964-63d16d275bc2" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 586.527578] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 773084a00a8a4d2c8da6fe242652fc50 [ 586.596404] env[62096]: INFO nova.compute.manager [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] [instance: 773f55a5-d40c-445e-b02a-268d4f88fdd7] Took 1.04 seconds to deallocate network for instance. [ 586.599070] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Expecting reply to msg 596e1869b7a0460abfddb14b8a67e45b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 586.639703] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 596e1869b7a0460abfddb14b8a67e45b [ 586.882641] env[62096]: INFO nova.compute.manager [-] [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] Took 1.07 seconds to deallocate network for instance. [ 586.885280] env[62096]: DEBUG nova.compute.claims [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 586.885460] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 586.994932] env[62096]: DEBUG oslo_concurrency.lockutils [req-ae27e6fd-5010-4da8-a7d3-64c8fb264813 req-2227cbec-c97f-4ecf-b365-6da1cbd122e8 service nova] Releasing lock "refresh_cache-1377dbd7-9025-4683-be56-6e8987a5d72e" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 587.058811] env[62096]: DEBUG nova.scheduler.client.report [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Updated inventory for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with generation 43 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:957}} [ 587.059061] env[62096]: DEBUG nova.compute.provider_tree [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Updating resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 generation from 43 
to 44 during operation: update_inventory {{(pid=62096) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 587.059236] env[62096]: DEBUG nova.compute.provider_tree [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 587.061676] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Expecting reply to msg 22ae35ff928c45c79c79ad6be4e4e27f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 587.078365] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 22ae35ff928c45c79c79ad6be4e4e27f [ 587.082016] env[62096]: ERROR nova.compute.manager [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 6385cb29-db63-485a-90be-0c7ea8677787, please check neutron logs for more information. [ 587.082016] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 587.082016] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 587.082016] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 587.082016] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 587.082016] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 587.082016] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 587.082016] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 587.082016] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 587.082016] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 587.082016] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 587.082016] env[62096]: ERROR nova.compute.manager raise self.value [ 587.082016] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 587.082016] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 587.082016] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 587.082016] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 587.082560] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 587.082560] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 587.082560] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 6385cb29-db63-485a-90be-0c7ea8677787, please check neutron logs for more information. [ 587.082560] env[62096]: ERROR nova.compute.manager [ 587.082560] env[62096]: Traceback (most recent call last): [ 587.082560] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 587.082560] env[62096]: listener.cb(fileno) [ 587.082560] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 587.082560] env[62096]: result = function(*args, **kwargs) [ 587.082560] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 587.082560] env[62096]: return func(*args, **kwargs) [ 587.082560] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 587.082560] env[62096]: raise e [ 587.082560] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 587.082560] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 587.082560] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 587.082560] env[62096]: created_port_ids = self._update_ports_for_instance( [ 587.082560] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 587.082560] env[62096]: with excutils.save_and_reraise_exception(): [ 587.082560] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 587.082560] env[62096]: self.force_reraise() [ 587.082560] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 587.082560] env[62096]: raise self.value [ 587.082560] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 587.082560] env[62096]: updated_port = self._update_port( [ 587.082560] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 587.082560] env[62096]: _ensure_no_port_binding_failure(port) [ 587.082560] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 587.082560] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 587.083537] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 6385cb29-db63-485a-90be-0c7ea8677787, please check neutron logs for more information. [ 587.083537] env[62096]: Removing descriptor: 16 [ 587.083918] env[62096]: ERROR nova.compute.manager [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 6385cb29-db63-485a-90be-0c7ea8677787, please check neutron logs for more information. 
[ 587.083918] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] Traceback (most recent call last): [ 587.083918] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 587.083918] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] yield resources [ 587.083918] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 587.083918] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] self.driver.spawn(context, instance, image_meta, [ 587.083918] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 587.083918] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] self._vmops.spawn(context, instance, image_meta, injected_files, [ 587.083918] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 587.083918] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] vm_ref = self.build_virtual_machine(instance, [ 587.083918] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 587.084308] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] vif_infos = vmwarevif.get_vif_info(self._session, [ 587.084308] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 587.084308] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] for vif in network_info: [ 587.084308] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 587.084308] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] return self._sync_wrapper(fn, *args, **kwargs) [ 587.084308] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 587.084308] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] self.wait() [ 587.084308] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 587.084308] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] self[:] = self._gt.wait() [ 587.084308] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 587.084308] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] return self._exit_event.wait() [ 587.084308] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 587.084308] env[62096]: ERROR 
nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] result = hub.switch() [ 587.084708] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 587.084708] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] return self.greenlet.switch() [ 587.084708] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 587.084708] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] result = function(*args, **kwargs) [ 587.084708] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 587.084708] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] return func(*args, **kwargs) [ 587.084708] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 587.084708] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] raise e [ 587.084708] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 587.084708] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] nwinfo = self.network_api.allocate_for_instance( [ 587.084708] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 587.084708] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] created_port_ids = self._update_ports_for_instance( [ 587.084708] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 587.085175] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] with excutils.save_and_reraise_exception(): [ 587.085175] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 587.085175] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] self.force_reraise() [ 587.085175] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 587.085175] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] raise self.value [ 587.085175] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 587.085175] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] updated_port = self._update_port( [ 587.085175] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 587.085175] 
env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] _ensure_no_port_binding_failure(port) [ 587.085175] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 587.085175] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] raise exception.PortBindingFailed(port_id=port['id']) [ 587.085175] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] nova.exception.PortBindingFailed: Binding failed for port 6385cb29-db63-485a-90be-0c7ea8677787, please check neutron logs for more information. [ 587.085175] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] [ 587.085574] env[62096]: INFO nova.compute.manager [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] Terminating instance [ 587.086857] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Acquiring lock "refresh_cache-09410bd9-aa4e-49a8-86fb-8058b842bd72" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 587.087042] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Acquired lock "refresh_cache-09410bd9-aa4e-49a8-86fb-8058b842bd72" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 587.087243] env[62096]: DEBUG nova.network.neutron [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 587.088096] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Expecting reply to msg a17e03a8ead2491fa6405cf49afbe67d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 587.095041] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a17e03a8ead2491fa6405cf49afbe67d [ 587.103449] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Expecting reply to msg 048f10502f574264a4d26f7fd120b5f9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 587.148396] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 048f10502f574264a4d26f7fd120b5f9 [ 587.576738] env[62096]: DEBUG oslo_concurrency.lockutils [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.289s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 587.576738] env[62096]: 
DEBUG nova.compute.manager [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] Start building networks asynchronously for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 587.576738] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Expecting reply to msg 54a09d9e305041e092c560dc012fdd79 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 587.576738] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.734s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 587.576738] env[62096]: INFO nova.compute.claims [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 587.577644] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Expecting reply to msg 3f74de4b11a44cb996e6cfb1580d3a06 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 587.625149] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 54a09d9e305041e092c560dc012fdd79 [ 587.638647] env[62096]: DEBUG nova.network.neutron [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 587.644156] env[62096]: INFO nova.scheduler.client.report [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Deleted allocations for instance 773f55a5-d40c-445e-b02a-268d4f88fdd7 [ 587.650928] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Expecting reply to msg 955f9c181bf14445be0e14491b5cfa63 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 587.658580] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3f74de4b11a44cb996e6cfb1580d3a06 [ 587.673625] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 955f9c181bf14445be0e14491b5cfa63 [ 587.780580] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Acquiring lock "0d89651a-8d34-428a-a9e7-d56ce9d3c5b4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 587.781005] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Lock "0d89651a-8d34-428a-a9e7-d56ce9d3c5b4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 587.873983] env[62096]: DEBUG nova.network.neutron [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 587.874534] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Expecting reply to msg 044e117c860542808164c575fc27a10a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 587.886548] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 044e117c860542808164c575fc27a10a [ 588.080845] env[62096]: DEBUG nova.compute.utils [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 588.081466] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Expecting reply to msg 9182ffead5d54c73a4473f2896796523 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 588.084788] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 
tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Expecting reply to msg d8b4821dada941089dfa14212293139c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 588.084788] env[62096]: DEBUG nova.compute.manager [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 588.084788] env[62096]: DEBUG nova.network.neutron [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 588.103656] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9182ffead5d54c73a4473f2896796523 [ 588.105046] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d8b4821dada941089dfa14212293139c [ 588.141544] env[62096]: DEBUG nova.policy [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '98c54142a483456a9246bc1093d20139', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0f099870ac31451fb52ef9940785c713', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 588.152613] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a5e0ed8f-7bbe-4249-b49a-d529c1e372e6 tempest-ServerActionsTestOtherB-733566255 tempest-ServerActionsTestOtherB-733566255-project-member] Lock "773f55a5-d40c-445e-b02a-268d4f88fdd7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.863s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 588.153161] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Expecting reply to msg 2df209cd6938402f81668518c8c00893 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 588.164546] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2df209cd6938402f81668518c8c00893 [ 588.377031] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Releasing lock "refresh_cache-09410bd9-aa4e-49a8-86fb-8058b842bd72" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 588.377464] env[62096]: DEBUG nova.compute.manager [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 588.377665] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 588.377978] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-98ba8650-7e0f-46e5-8da7-57774ab5f1b5 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.386866] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeedcba5-379b-4dcf-afcd-2077a70b721e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.408368] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 09410bd9-aa4e-49a8-86fb-8058b842bd72 could not be found. [ 588.408601] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 588.408782] env[62096]: INFO nova.compute.manager [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] Took 0.03 seconds to destroy the instance on the hypervisor. [ 588.409026] env[62096]: DEBUG oslo.service.loopingcall [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 588.409244] env[62096]: DEBUG nova.compute.manager [-] [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 588.409341] env[62096]: DEBUG nova.network.neutron [-] [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 588.441396] env[62096]: DEBUG nova.network.neutron [-] [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 588.441946] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 05241439cb1c47fd95e0146e4693df1b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 588.449793] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 05241439cb1c47fd95e0146e4693df1b [ 588.591578] env[62096]: DEBUG nova.compute.manager [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] Start building block device mappings for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 588.593437] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Expecting reply to msg cd345342d51b4ca38ece8a0f8eac2d18 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 588.599196] env[62096]: DEBUG oslo_concurrency.lockutils [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Acquiring lock "ac8746f4-95b0-440e-bc3e-a92457ed664f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 588.599196] env[62096]: DEBUG oslo_concurrency.lockutils [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Lock "ac8746f4-95b0-440e-bc3e-a92457ed664f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 588.638642] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cd345342d51b4ca38ece8a0f8eac2d18 [ 588.656026] env[62096]: DEBUG nova.compute.manager [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] [instance: 9bc49e7a-af22-4112-8609-348605599692] Starting instance... 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 588.657624] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Expecting reply to msg c525a6491a6e4b53a6238ff9b7e43fc9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 588.695697] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c525a6491a6e4b53a6238ff9b7e43fc9 [ 588.735915] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Acquiring lock "7d62e302-8080-4699-b88d-cb29031e6707" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 588.736334] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Lock "7d62e302-8080-4699-b88d-cb29031e6707" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 588.944809] env[62096]: DEBUG nova.network.neutron [-] [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 588.944809] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 84b4f5e4c4ac4ca6a714f648c4598273 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 588.954905] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 84b4f5e4c4ac4ca6a714f648c4598273 [ 589.029978] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5a838f8-f554-4841-85c8-f5e2690a3601 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.037865] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-013bbcb5-618b-4024-abc8-833dc7dc709e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.066508] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68fb08bc-f527-4909-bde1-5a757ef81c03 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.074164] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac921cac-02d5-43af-b061-6ca60b44c2ea {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.088119] env[62096]: DEBUG nova.compute.provider_tree [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 589.089006] 
env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Expecting reply to msg 1a80f6e853b44a3f9a917a3fd58e6950 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 589.097870] env[62096]: INFO nova.virt.block_device [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] Booting with volume 2b3ef02e-ce2b-44b8-b9d7-2912bfd0364b at /dev/sda [ 589.101125] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1a80f6e853b44a3f9a917a3fd58e6950 [ 589.120476] env[62096]: DEBUG nova.compute.manager [req-343ccd32-5614-4fd0-8df8-643c918a4060 req-0d3d7250-7cbf-40a9-ace6-d1527b3c1400 service nova] [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] Received event network-vif-deleted-b07a03a0-8320-49d6-93ef-3631d9a04a2a {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 589.120476] env[62096]: DEBUG nova.compute.manager [req-343ccd32-5614-4fd0-8df8-643c918a4060 req-0d3d7250-7cbf-40a9-ace6-d1527b3c1400 service nova] [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] Received event network-changed-6385cb29-db63-485a-90be-0c7ea8677787 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 589.120476] env[62096]: DEBUG nova.compute.manager [req-343ccd32-5614-4fd0-8df8-643c918a4060 req-0d3d7250-7cbf-40a9-ace6-d1527b3c1400 service nova] [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] Refreshing instance network info cache due to event network-changed-6385cb29-db63-485a-90be-0c7ea8677787. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 589.120584] env[62096]: DEBUG oslo_concurrency.lockutils [req-343ccd32-5614-4fd0-8df8-643c918a4060 req-0d3d7250-7cbf-40a9-ace6-d1527b3c1400 service nova] Acquiring lock "refresh_cache-09410bd9-aa4e-49a8-86fb-8058b842bd72" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 589.122356] env[62096]: DEBUG oslo_concurrency.lockutils [req-343ccd32-5614-4fd0-8df8-643c918a4060 req-0d3d7250-7cbf-40a9-ace6-d1527b3c1400 service nova] Acquired lock "refresh_cache-09410bd9-aa4e-49a8-86fb-8058b842bd72" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 589.122356] env[62096]: DEBUG nova.network.neutron [req-343ccd32-5614-4fd0-8df8-643c918a4060 req-0d3d7250-7cbf-40a9-ace6-d1527b3c1400 service nova] [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] Refreshing network info cache for port 6385cb29-db63-485a-90be-0c7ea8677787 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 589.122356] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-343ccd32-5614-4fd0-8df8-643c918a4060 req-0d3d7250-7cbf-40a9-ace6-d1527b3c1400 service nova] Expecting reply to msg 6c708bd495094f2f8c0e071925df263c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 589.128407] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6c708bd495094f2f8c0e071925df263c [ 589.134435] env[62096]: DEBUG nova.network.neutron [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] Successfully created port: 
e792c2a8-f6e8-407d-9d05-07ff5ba3feb0 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 589.159260] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d3c1b56b-79ed-4705-8906-b445fcb7c64d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.173354] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17e60024-b528-40fa-ab1b-b43d5e77c1de {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.193700] env[62096]: DEBUG oslo_concurrency.lockutils [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 589.206811] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8fa448d3-270d-4e3a-ba42-f45c612f5be8 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.217251] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3714aa7a-b058-4298-91e1-b750b677d64a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.244387] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68d3aff0-e5e4-48b6-9d52-b9529d0c3f94 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.253188] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35da0d50-1962-4583-86a5-7268430f0d48 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.268910] env[62096]: DEBUG nova.virt.block_device [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] Updating existing volume attachment record: 2090ab68-6046-4137-965c-ef0a19407873 {{(pid=62096) _volume_attach /opt/stack/nova/nova/virt/block_device.py:665}} [ 589.431498] env[62096]: DEBUG nova.compute.manager [req-0d032fe1-3e64-443e-87a6-ec5eb87dc9cc req-de7e39eb-1bb7-4a16-9524-0eb0df7a3f87 service nova] [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] Received event network-vif-deleted-f73e7926-66c6-4049-9ef5-d36a5d8654a0 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 589.448644] env[62096]: INFO nova.compute.manager [-] [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] Took 1.04 seconds to deallocate network for instance. 
[ 589.451647] env[62096]: DEBUG nova.compute.claims [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 589.451827] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 589.599128] env[62096]: DEBUG nova.scheduler.client.report [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 589.599478] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Expecting reply to msg 9d2fba748c344a949cfe492864fd5958 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 589.616025] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9d2fba748c344a949cfe492864fd5958 [ 589.645762] env[62096]: DEBUG nova.network.neutron [req-343ccd32-5614-4fd0-8df8-643c918a4060 req-0d3d7250-7cbf-40a9-ace6-d1527b3c1400 service nova] [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 589.741296] env[62096]: DEBUG nova.network.neutron [req-343ccd32-5614-4fd0-8df8-643c918a4060 req-0d3d7250-7cbf-40a9-ace6-d1527b3c1400 service nova] [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 589.741296] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-343ccd32-5614-4fd0-8df8-643c918a4060 req-0d3d7250-7cbf-40a9-ace6-d1527b3c1400 service nova] Expecting reply to msg 8de3128ab3c347dd81dc242affae8022 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 589.760742] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8de3128ab3c347dd81dc242affae8022 [ 589.847031] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Expecting reply to msg 215029387e924e43bce67361e1c59a01 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 589.875288] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 215029387e924e43bce67361e1c59a01 [ 589.931793] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Acquiring lock "3480c79b-58e4-4759-acd4-b2f45f22da54" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 589.932273] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Lock "3480c79b-58e4-4759-acd4-b2f45f22da54" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 590.102109] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.528s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 590.102625] env[62096]: DEBUG nova.compute.manager [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 590.104342] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Expecting reply to msg 27b974e8bab24293ae8b1a70f0d32c64 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 590.105785] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 23.670s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 590.107948] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Expecting reply to msg 6dfd77bdf60b4b1ea40731407c1a3d73 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 590.143722] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 27b974e8bab24293ae8b1a70f0d32c64 [ 590.146096] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6dfd77bdf60b4b1ea40731407c1a3d73 [ 590.244560] env[62096]: DEBUG oslo_concurrency.lockutils [req-343ccd32-5614-4fd0-8df8-643c918a4060 req-0d3d7250-7cbf-40a9-ace6-d1527b3c1400 service nova] Releasing lock "refresh_cache-09410bd9-aa4e-49a8-86fb-8058b842bd72" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 590.244894] env[62096]: DEBUG nova.compute.manager [req-343ccd32-5614-4fd0-8df8-643c918a4060 req-0d3d7250-7cbf-40a9-ace6-d1527b3c1400 service nova] [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] Received event network-vif-deleted-6385cb29-db63-485a-90be-0c7ea8677787 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 590.387683] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Expecting reply to msg c3380ba4b7754ca490f431bb7264a38a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 590.397805] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c3380ba4b7754ca490f431bb7264a38a [ 590.611009] env[62096]: DEBUG nova.compute.utils [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 590.611636] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Expecting reply to msg e07d063d62fd4e5182b5cf365bc7e676 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 590.617974] env[62096]: DEBUG nova.compute.manager [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 590.617974] env[62096]: DEBUG nova.network.neutron [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 590.625744] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e07d063d62fd4e5182b5cf365bc7e676 [ 590.683353] env[62096]: DEBUG nova.policy [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a4cd009d007949a6a0dadc6a85577e6e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f7d8f0450d084f9b984bcdf2e1cdf695', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 590.891553] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Expecting reply to msg ab95d209fbf848c083908d3412f26f25 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 590.949574] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ab95d209fbf848c083908d3412f26f25 [ 591.102414] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4cab0f5-eac3-4b82-ac00-2a69a7e520a3 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.109990] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd67e735-c025-4a67-bd00-b0bfc5d9d8db {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.116609] env[62096]: DEBUG nova.compute.manager [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 591.118596] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Expecting reply to msg 5dd82641cc324e508f87c9f66fa82ed2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 591.143948] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dd72832-35b0-4f98-bd14-b9ecb6c62f60 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.151840] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87bb77ec-e7c3-46be-8c57-dcf3012b37da {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.167529] env[62096]: DEBUG nova.compute.provider_tree [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 591.168180] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Expecting reply to msg 78f03b5699ed444882a600ca9750038a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 591.169351] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5dd82641cc324e508f87c9f66fa82ed2 [ 591.178240] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 78f03b5699ed444882a600ca9750038a [ 591.207048] env[62096]: DEBUG nova.network.neutron [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] Successfully created port: 2371b750-f7b3-4954-904e-f8f99fe6cc78 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 591.249895] env[62096]: ERROR nova.compute.manager [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e792c2a8-f6e8-407d-9d05-07ff5ba3feb0, please check neutron logs for more information. 
[ 591.249895] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 591.249895] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 591.249895] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 591.249895] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 591.249895] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 591.249895] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 591.249895] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 591.249895] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 591.249895] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 591.249895] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 591.249895] env[62096]: ERROR nova.compute.manager raise self.value [ 591.249895] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 591.249895] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 591.249895] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 591.249895] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 591.250434] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 591.250434] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 591.250434] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e792c2a8-f6e8-407d-9d05-07ff5ba3feb0, please check neutron logs for more information. 
[ 591.250434] env[62096]: ERROR nova.compute.manager [ 591.250434] env[62096]: Traceback (most recent call last): [ 591.250434] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 591.250434] env[62096]: listener.cb(fileno) [ 591.250434] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 591.250434] env[62096]: result = function(*args, **kwargs) [ 591.250434] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 591.250434] env[62096]: return func(*args, **kwargs) [ 591.250434] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 591.250434] env[62096]: raise e [ 591.250434] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 591.250434] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 591.250434] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 591.250434] env[62096]: created_port_ids = self._update_ports_for_instance( [ 591.250434] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 591.250434] env[62096]: with excutils.save_and_reraise_exception(): [ 591.250434] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 591.250434] env[62096]: self.force_reraise() [ 591.250434] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 591.250434] env[62096]: raise self.value [ 591.250434] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 591.250434] env[62096]: updated_port = self._update_port( [ 591.250434] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 591.250434] env[62096]: _ensure_no_port_binding_failure(port) [ 591.250434] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 591.250434] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 591.251340] env[62096]: nova.exception.PortBindingFailed: Binding failed for port e792c2a8-f6e8-407d-9d05-07ff5ba3feb0, please check neutron logs for more information. [ 591.251340] env[62096]: Removing descriptor: 16 [ 591.396086] env[62096]: DEBUG nova.compute.manager [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 591.396493] env[62096]: DEBUG nova.virt.hardware [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 591.396712] env[62096]: DEBUG nova.virt.hardware [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 591.396865] env[62096]: DEBUG nova.virt.hardware [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 591.397070] env[62096]: DEBUG nova.virt.hardware [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 591.397225] env[62096]: DEBUG nova.virt.hardware [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 591.397369] env[62096]: DEBUG nova.virt.hardware [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 591.397564] env[62096]: DEBUG nova.virt.hardware [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 591.397718] env[62096]: DEBUG nova.virt.hardware [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 591.397880] env[62096]: DEBUG nova.virt.hardware [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] 
Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 591.398039] env[62096]: DEBUG nova.virt.hardware [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 591.398209] env[62096]: DEBUG nova.virt.hardware [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 591.399483] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-231da86e-56fb-4baf-9813-0d31d63d3ca0 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.408396] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8fd5bc6-cf64-4f23-8751-13b6f24143ac {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.424332] env[62096]: ERROR nova.compute.manager [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e792c2a8-f6e8-407d-9d05-07ff5ba3feb0, please check neutron logs for more information. 
[ 591.424332] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] Traceback (most recent call last): [ 591.424332] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 591.424332] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] yield resources [ 591.424332] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 591.424332] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] self.driver.spawn(context, instance, image_meta, [ 591.424332] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 591.424332] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] self._vmops.spawn(context, instance, image_meta, injected_files, [ 591.424332] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 591.424332] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] vm_ref = self.build_virtual_machine(instance, [ 591.424332] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 591.424715] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] vif_infos = vmwarevif.get_vif_info(self._session, [ 591.424715] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 591.424715] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] for vif in network_info: [ 591.424715] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 591.424715] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] return self._sync_wrapper(fn, *args, **kwargs) [ 591.424715] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 591.424715] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] self.wait() [ 591.424715] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 591.424715] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] self[:] = self._gt.wait() [ 591.424715] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 591.424715] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] return self._exit_event.wait() [ 591.424715] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 591.424715] env[62096]: ERROR 
nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] current.throw(*self._exc) [ 591.425170] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 591.425170] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] result = function(*args, **kwargs) [ 591.425170] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 591.425170] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] return func(*args, **kwargs) [ 591.425170] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 591.425170] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] raise e [ 591.425170] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 591.425170] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] nwinfo = self.network_api.allocate_for_instance( [ 591.425170] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 591.425170] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] created_port_ids = self._update_ports_for_instance( [ 591.425170] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 591.425170] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] with excutils.save_and_reraise_exception(): [ 591.425170] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 591.425670] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] self.force_reraise() [ 591.425670] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 591.425670] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] raise self.value [ 591.425670] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 591.425670] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] updated_port = self._update_port( [ 591.425670] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 591.425670] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] _ensure_no_port_binding_failure(port) [ 591.425670] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
591.425670] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] raise exception.PortBindingFailed(port_id=port['id']) [ 591.425670] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] nova.exception.PortBindingFailed: Binding failed for port e792c2a8-f6e8-407d-9d05-07ff5ba3feb0, please check neutron logs for more information. [ 591.425670] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] [ 591.425670] env[62096]: INFO nova.compute.manager [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] Terminating instance [ 591.427673] env[62096]: DEBUG oslo_concurrency.lockutils [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Acquiring lock "refresh_cache-4fca4cf6-1c0e-487e-bf26-fc441d143128" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 591.427673] env[62096]: DEBUG oslo_concurrency.lockutils [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Acquired lock "refresh_cache-4fca4cf6-1c0e-487e-bf26-fc441d143128" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 591.427861] env[62096]: DEBUG nova.network.neutron [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 591.428332] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Expecting reply to msg 8e2ab69df00c469485f0a214ae75356f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 591.435555] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8e2ab69df00c469485f0a214ae75356f [ 591.592463] env[62096]: DEBUG nova.compute.manager [req-b8a10c56-431d-43ea-acd6-6144abcbcf68 req-1efb986b-08ca-43da-a9e6-f2816dfb04a8 service nova] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] Received event network-changed-e792c2a8-f6e8-407d-9d05-07ff5ba3feb0 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 591.592665] env[62096]: DEBUG nova.compute.manager [req-b8a10c56-431d-43ea-acd6-6144abcbcf68 req-1efb986b-08ca-43da-a9e6-f2816dfb04a8 service nova] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] Refreshing instance network info cache due to event network-changed-e792c2a8-f6e8-407d-9d05-07ff5ba3feb0. 
{{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 591.592855] env[62096]: DEBUG oslo_concurrency.lockutils [req-b8a10c56-431d-43ea-acd6-6144abcbcf68 req-1efb986b-08ca-43da-a9e6-f2816dfb04a8 service nova] Acquiring lock "refresh_cache-4fca4cf6-1c0e-487e-bf26-fc441d143128" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 591.646307] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Expecting reply to msg 322519f818da42bda81cf257aa350bdb in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 591.671714] env[62096]: DEBUG nova.scheduler.client.report [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 591.674138] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Expecting reply to msg 7fa64b19183245d9acade95455770844 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 591.687478] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 322519f818da42bda81cf257aa350bdb [ 591.689710] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7fa64b19183245d9acade95455770844 [ 591.969072] env[62096]: DEBUG nova.network.neutron [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 592.162605] env[62096]: DEBUG nova.compute.manager [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 592.176861] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.071s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 592.177597] env[62096]: ERROR nova.compute.manager [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] [instance: 3530e93e-f729-471b-976b-e52c3182cb15] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 5fdd06cc-1563-494e-a278-34fb92f36b0a, please check neutron logs for more information. [ 592.177597] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] Traceback (most recent call last): [ 592.177597] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 592.177597] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] self.driver.spawn(context, instance, image_meta, [ 592.177597] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 592.177597] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] self._vmops.spawn(context, instance, image_meta, injected_files, [ 592.177597] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 592.177597] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] vm_ref = self.build_virtual_machine(instance, [ 592.177597] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 592.177597] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] vif_infos = vmwarevif.get_vif_info(self._session, [ 592.177597] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 592.177992] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] for vif in network_info: [ 592.177992] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 592.177992] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] return self._sync_wrapper(fn, *args, **kwargs) [ 592.177992] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 592.177992] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] self.wait() [ 592.177992] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 592.177992] 
env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] self[:] = self._gt.wait() [ 592.177992] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 592.177992] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] return self._exit_event.wait() [ 592.177992] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 592.177992] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] result = hub.switch() [ 592.177992] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 592.177992] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] return self.greenlet.switch() [ 592.178349] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 592.178349] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] result = function(*args, **kwargs) [ 592.178349] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 592.178349] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] return func(*args, **kwargs) [ 592.178349] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 592.178349] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] raise e [ 592.178349] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 592.178349] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] nwinfo = self.network_api.allocate_for_instance( [ 592.178349] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 592.178349] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] created_port_ids = self._update_ports_for_instance( [ 592.178349] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 592.178349] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] with excutils.save_and_reraise_exception(): [ 592.178349] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 592.178703] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] self.force_reraise() [ 592.178703] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 592.178703] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] raise self.value [ 592.178703] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 592.178703] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] updated_port = self._update_port( [ 592.178703] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 592.178703] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] _ensure_no_port_binding_failure(port) [ 592.178703] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 592.178703] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] raise exception.PortBindingFailed(port_id=port['id']) [ 592.178703] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] nova.exception.PortBindingFailed: Binding failed for port 5fdd06cc-1563-494e-a278-34fb92f36b0a, please check neutron logs for more information. [ 592.178703] env[62096]: ERROR nova.compute.manager [instance: 3530e93e-f729-471b-976b-e52c3182cb15] [ 592.179010] env[62096]: DEBUG nova.compute.utils [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] [instance: 3530e93e-f729-471b-976b-e52c3182cb15] Binding failed for port 5fdd06cc-1563-494e-a278-34fb92f36b0a, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 592.179933] env[62096]: DEBUG oslo_concurrency.lockutils [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.491s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 592.181572] env[62096]: INFO nova.compute.claims [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 592.183257] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Expecting reply to msg 97a34d80e6c84af1886f7fd445e870d7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 592.192448] env[62096]: DEBUG nova.compute.manager [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] [instance: 3530e93e-f729-471b-976b-e52c3182cb15] Build of instance 3530e93e-f729-471b-976b-e52c3182cb15 was re-scheduled: Binding failed for port 5fdd06cc-1563-494e-a278-34fb92f36b0a, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 592.193025] env[62096]: DEBUG nova.compute.manager [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] [instance: 3530e93e-f729-471b-976b-e52c3182cb15] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 592.193254] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Acquiring lock "refresh_cache-3530e93e-f729-471b-976b-e52c3182cb15" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 592.193404] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Acquired lock "refresh_cache-3530e93e-f729-471b-976b-e52c3182cb15" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 592.193559] env[62096]: DEBUG nova.network.neutron [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] [instance: 3530e93e-f729-471b-976b-e52c3182cb15] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 592.194021] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Expecting reply to msg 93fa2e49095845bdac13eec7d53953de in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 592.204301] env[62096]: DEBUG nova.virt.hardware [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 592.204477] env[62096]: DEBUG nova.virt.hardware [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 592.204627] env[62096]: DEBUG nova.virt.hardware [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Image limits 0:0:0 
{{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 592.204819] env[62096]: DEBUG nova.virt.hardware [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 592.204972] env[62096]: DEBUG nova.virt.hardware [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 592.205113] env[62096]: DEBUG nova.virt.hardware [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 592.205312] env[62096]: DEBUG nova.virt.hardware [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 592.205461] env[62096]: DEBUG nova.virt.hardware [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 592.205617] env[62096]: DEBUG nova.virt.hardware [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 592.205896] env[62096]: DEBUG nova.virt.hardware [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 592.206018] env[62096]: DEBUG nova.virt.hardware [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 592.208702] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 93fa2e49095845bdac13eec7d53953de [ 592.208702] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34fbc57b-7a8b-4940-96ba-d83c1ce77e49 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.218202] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-555e7d68-bc8a-4fb1-af8d-2b1028e39325 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.240077] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for 
msg 97a34d80e6c84af1886f7fd445e870d7 [ 592.272463] env[62096]: DEBUG nova.network.neutron [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 592.272608] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Expecting reply to msg 784c6fc7700e41ea8a01d6d8faf53bdc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 592.281353] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 784c6fc7700e41ea8a01d6d8faf53bdc [ 592.527697] env[62096]: ERROR nova.compute.manager [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 2371b750-f7b3-4954-904e-f8f99fe6cc78, please check neutron logs for more information. [ 592.527697] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 592.527697] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 592.527697] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 592.527697] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 592.527697] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 592.527697] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 592.527697] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 592.527697] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 592.527697] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 592.527697] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 592.527697] env[62096]: ERROR nova.compute.manager raise self.value [ 592.527697] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 592.527697] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 592.527697] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 592.527697] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 592.528349] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 592.528349] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 592.528349] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 2371b750-f7b3-4954-904e-f8f99fe6cc78, please check neutron logs for more information. 
[ 592.528349] env[62096]: ERROR nova.compute.manager [ 592.528349] env[62096]: Traceback (most recent call last): [ 592.528349] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 592.528349] env[62096]: listener.cb(fileno) [ 592.528349] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 592.528349] env[62096]: result = function(*args, **kwargs) [ 592.528349] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 592.528349] env[62096]: return func(*args, **kwargs) [ 592.528349] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 592.528349] env[62096]: raise e [ 592.528349] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 592.528349] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 592.528349] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 592.528349] env[62096]: created_port_ids = self._update_ports_for_instance( [ 592.528349] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 592.528349] env[62096]: with excutils.save_and_reraise_exception(): [ 592.528349] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 592.528349] env[62096]: self.force_reraise() [ 592.528349] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 592.528349] env[62096]: raise self.value [ 592.528349] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 592.528349] env[62096]: updated_port = self._update_port( [ 592.528349] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 592.528349] env[62096]: _ensure_no_port_binding_failure(port) [ 592.528349] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 592.528349] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 592.529297] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 2371b750-f7b3-4954-904e-f8f99fe6cc78, please check neutron logs for more information. [ 592.529297] env[62096]: Removing descriptor: 14 [ 592.529297] env[62096]: ERROR nova.compute.manager [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 2371b750-f7b3-4954-904e-f8f99fe6cc78, please check neutron logs for more information. 
[ 592.529297] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] Traceback (most recent call last): [ 592.529297] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 592.529297] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] yield resources [ 592.529297] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 592.529297] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] self.driver.spawn(context, instance, image_meta, [ 592.529297] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 592.529297] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] self._vmops.spawn(context, instance, image_meta, injected_files, [ 592.529297] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 592.529297] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] vm_ref = self.build_virtual_machine(instance, [ 592.529705] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 592.529705] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] vif_infos = vmwarevif.get_vif_info(self._session, [ 592.529705] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 592.529705] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] for vif in network_info: [ 592.529705] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 592.529705] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] return self._sync_wrapper(fn, *args, **kwargs) [ 592.529705] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 592.529705] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] self.wait() [ 592.529705] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 592.529705] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] self[:] = self._gt.wait() [ 592.529705] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 592.529705] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] return self._exit_event.wait() [ 592.529705] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 592.530142] env[62096]: ERROR 
nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] result = hub.switch() [ 592.530142] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 592.530142] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] return self.greenlet.switch() [ 592.530142] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 592.530142] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] result = function(*args, **kwargs) [ 592.530142] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 592.530142] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] return func(*args, **kwargs) [ 592.530142] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 592.530142] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] raise e [ 592.530142] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 592.530142] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] nwinfo = self.network_api.allocate_for_instance( [ 592.530142] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 592.530142] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] created_port_ids = self._update_ports_for_instance( [ 592.530564] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 592.530564] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] with excutils.save_and_reraise_exception(): [ 592.530564] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 592.530564] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] self.force_reraise() [ 592.530564] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 592.530564] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] raise self.value [ 592.530564] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 592.530564] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] updated_port = self._update_port( [ 592.530564] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 592.530564] 
env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] _ensure_no_port_binding_failure(port) [ 592.530564] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 592.530564] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] raise exception.PortBindingFailed(port_id=port['id']) [ 592.530959] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] nova.exception.PortBindingFailed: Binding failed for port 2371b750-f7b3-4954-904e-f8f99fe6cc78, please check neutron logs for more information. [ 592.530959] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] [ 592.530959] env[62096]: INFO nova.compute.manager [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] Terminating instance [ 592.531926] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Acquiring lock "refresh_cache-4fdcbe73-6d7e-4f89-b83d-e6b1664bd317" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 592.532177] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Acquired lock "refresh_cache-4fdcbe73-6d7e-4f89-b83d-e6b1664bd317" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 592.532371] env[62096]: DEBUG nova.network.neutron [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 592.532792] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Expecting reply to msg b0f8c4a442f843dd90cc6d698c602f3e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 592.539701] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b0f8c4a442f843dd90cc6d698c602f3e [ 592.584090] env[62096]: DEBUG nova.compute.manager [req-76411d8f-8981-42e3-bef9-224a823a050f req-2dc05533-23a7-44b7-945d-5644b4892d88 service nova] [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] Received event network-changed-2371b750-f7b3-4954-904e-f8f99fe6cc78 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 592.584324] env[62096]: DEBUG nova.compute.manager [req-76411d8f-8981-42e3-bef9-224a823a050f req-2dc05533-23a7-44b7-945d-5644b4892d88 service nova] [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] Refreshing instance network info cache due to event network-changed-2371b750-f7b3-4954-904e-f8f99fe6cc78. 
{{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 592.584480] env[62096]: DEBUG oslo_concurrency.lockutils [req-76411d8f-8981-42e3-bef9-224a823a050f req-2dc05533-23a7-44b7-945d-5644b4892d88 service nova] Acquiring lock "refresh_cache-4fdcbe73-6d7e-4f89-b83d-e6b1664bd317" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 592.703310] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Expecting reply to msg b58e71c58e57433eb17c9d7bdfc0ddbc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 592.713819] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b58e71c58e57433eb17c9d7bdfc0ddbc [ 592.780997] env[62096]: DEBUG oslo_concurrency.lockutils [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Releasing lock "refresh_cache-4fca4cf6-1c0e-487e-bf26-fc441d143128" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 592.780997] env[62096]: DEBUG nova.compute.manager [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 592.780997] env[62096]: DEBUG oslo_concurrency.lockutils [req-b8a10c56-431d-43ea-acd6-6144abcbcf68 req-1efb986b-08ca-43da-a9e6-f2816dfb04a8 service nova] Acquired lock "refresh_cache-4fca4cf6-1c0e-487e-bf26-fc441d143128" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 592.780997] env[62096]: DEBUG nova.network.neutron [req-b8a10c56-431d-43ea-acd6-6144abcbcf68 req-1efb986b-08ca-43da-a9e6-f2816dfb04a8 service nova] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] Refreshing network info cache for port e792c2a8-f6e8-407d-9d05-07ff5ba3feb0 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 592.780997] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-b8a10c56-431d-43ea-acd6-6144abcbcf68 req-1efb986b-08ca-43da-a9e6-f2816dfb04a8 service nova] Expecting reply to msg 849f4460d8aa4892a6d0d2795320e55c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 592.781270] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d6c204ba-c073-4136-a9f4-6c865efb50a5 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.786743] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 849f4460d8aa4892a6d0d2795320e55c [ 592.790280] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d41cdfe5-03bc-4a42-9a1f-1a4e05002993 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.820717] env[62096]: WARNING nova.virt.vmwareapi.driver [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] Instance does not exists. 
Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 4fca4cf6-1c0e-487e-bf26-fc441d143128 could not be found. [ 592.821126] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 592.821221] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-efe4cea8-097e-4469-8547-1e2bf2b25b29 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.828973] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2818b0ee-caf8-4b77-91db-886885565d6d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.852775] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4fca4cf6-1c0e-487e-bf26-fc441d143128 could not be found. [ 592.853772] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 592.854044] env[62096]: INFO nova.compute.manager [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] Took 0.08 seconds to destroy the instance on the hypervisor. [ 592.854321] env[62096]: DEBUG oslo.service.loopingcall [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 592.856778] env[62096]: DEBUG nova.network.neutron [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] [instance: 3530e93e-f729-471b-976b-e52c3182cb15] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 592.857360] env[62096]: DEBUG nova.compute.manager [-] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 592.857519] env[62096]: DEBUG nova.network.neutron [-] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 592.902573] env[62096]: DEBUG nova.network.neutron [-] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 592.903194] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 453d9a3c996649a5b09b821d5bc358cb in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 592.910760] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 453d9a3c996649a5b09b821d5bc358cb [ 592.982315] env[62096]: DEBUG nova.network.neutron [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] [instance: 3530e93e-f729-471b-976b-e52c3182cb15] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 592.982842] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Expecting reply to msg 1b4d88fad8fe4745a76208d697dcfb14 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 592.992919] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1b4d88fad8fe4745a76208d697dcfb14 [ 593.060017] env[62096]: DEBUG nova.network.neutron [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 593.168412] env[62096]: DEBUG nova.network.neutron [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 593.168848] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Expecting reply to msg d2f11cb612504a0aaf1af9fb2090fcaa in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 593.180582] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d2f11cb612504a0aaf1af9fb2090fcaa [ 593.333998] env[62096]: DEBUG nova.network.neutron [req-b8a10c56-431d-43ea-acd6-6144abcbcf68 req-1efb986b-08ca-43da-a9e6-f2816dfb04a8 service nova] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 593.406061] env[62096]: DEBUG nova.network.neutron [-] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 593.406448] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 77ad47ba37ce4a5097e3410313ad720d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 593.420702] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 77ad47ba37ce4a5097e3410313ad720d [ 593.487433] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Releasing lock "refresh_cache-3530e93e-f729-471b-976b-e52c3182cb15" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 593.487649] env[62096]: DEBUG nova.compute.manager [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 593.487820] env[62096]: DEBUG nova.compute.manager [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] [instance: 3530e93e-f729-471b-976b-e52c3182cb15] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 593.488010] env[62096]: DEBUG nova.network.neutron [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] [instance: 3530e93e-f729-471b-976b-e52c3182cb15] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 593.519284] env[62096]: DEBUG nova.network.neutron [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] [instance: 3530e93e-f729-471b-976b-e52c3182cb15] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 593.519873] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Expecting reply to msg 805c7e7f39144ed7b4f84aed6a3bedfc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 593.524701] env[62096]: DEBUG nova.network.neutron [req-b8a10c56-431d-43ea-acd6-6144abcbcf68 req-1efb986b-08ca-43da-a9e6-f2816dfb04a8 service nova] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 593.524701] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-b8a10c56-431d-43ea-acd6-6144abcbcf68 req-1efb986b-08ca-43da-a9e6-f2816dfb04a8 service nova] Expecting reply to msg 43bfda29afb646dda280bf3aa7d75ec7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 593.531378] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 805c7e7f39144ed7b4f84aed6a3bedfc [ 593.533577] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 43bfda29afb646dda280bf3aa7d75ec7 [ 593.671121] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Releasing lock "refresh_cache-4fdcbe73-6d7e-4f89-b83d-e6b1664bd317" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 593.671664] env[62096]: DEBUG nova.compute.manager [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 593.671932] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 593.673298] env[62096]: DEBUG oslo_concurrency.lockutils [req-76411d8f-8981-42e3-bef9-224a823a050f req-2dc05533-23a7-44b7-945d-5644b4892d88 service nova] Acquired lock "refresh_cache-4fdcbe73-6d7e-4f89-b83d-e6b1664bd317" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 593.673512] env[62096]: DEBUG nova.network.neutron [req-76411d8f-8981-42e3-bef9-224a823a050f req-2dc05533-23a7-44b7-945d-5644b4892d88 service nova] [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] Refreshing network info cache for port 2371b750-f7b3-4954-904e-f8f99fe6cc78 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 593.674025] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-76411d8f-8981-42e3-bef9-224a823a050f req-2dc05533-23a7-44b7-945d-5644b4892d88 service nova] Expecting reply to msg bd4142b5d5934771a5e6760f2aff87a1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 593.674797] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-54b71ac3-3310-44cf-88f8-01d289b3881c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.678829] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb7dd50c-f5b7-43e1-b20f-60ed3a15eb37 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.681619] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bd4142b5d5934771a5e6760f2aff87a1 [ 593.692965] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6693ee67-df4d-49e2-8bb2-8aef601180a3 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.703728] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c640f49-6936-4d2b-9221-08376d48bdd9 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.740712] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efb85e50-27cd-4da3-8f6d-8cee5b842a0b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.743549] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317 could not be found. 
[ 593.743801] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 593.744079] env[62096]: INFO nova.compute.manager [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] Took 0.07 seconds to destroy the instance on the hypervisor. [ 593.744407] env[62096]: DEBUG oslo.service.loopingcall [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 593.744559] env[62096]: DEBUG nova.compute.manager [-] [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 593.744647] env[62096]: DEBUG nova.network.neutron [-] [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 593.750750] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-498469da-0af7-409b-a22e-0918f2995479 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.764392] env[62096]: DEBUG nova.compute.provider_tree [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 593.764835] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Expecting reply to msg 8a6918e0a90e48a5a6171e0ce3a722c4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 593.772332] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8a6918e0a90e48a5a6171e0ce3a722c4 [ 593.773300] env[62096]: DEBUG nova.network.neutron [-] [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 593.773876] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg d56ef5dc491e42cc8f8f81569b339917 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 593.779929] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d56ef5dc491e42cc8f8f81569b339917 [ 593.909974] env[62096]: INFO nova.compute.manager [-] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] Took 1.05 seconds to deallocate network for instance. 
[ 593.916803] env[62096]: DEBUG nova.compute.manager [req-7e42a5b2-342c-4a53-b3e0-9f56791b0d1c req-3ab8be12-848b-4328-8c6f-1ae829360482 service nova] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] Received event network-vif-deleted-e792c2a8-f6e8-407d-9d05-07ff5ba3feb0 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 594.022371] env[62096]: DEBUG nova.network.neutron [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] [instance: 3530e93e-f729-471b-976b-e52c3182cb15] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 594.022822] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Expecting reply to msg 22f9be4a947c4f6dac7a2525e7d2cba0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 594.026545] env[62096]: DEBUG oslo_concurrency.lockutils [req-b8a10c56-431d-43ea-acd6-6144abcbcf68 req-1efb986b-08ca-43da-a9e6-f2816dfb04a8 service nova] Releasing lock "refresh_cache-4fca4cf6-1c0e-487e-bf26-fc441d143128" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 594.036302] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 22f9be4a947c4f6dac7a2525e7d2cba0 [ 594.199422] env[62096]: DEBUG nova.network.neutron [req-76411d8f-8981-42e3-bef9-224a823a050f req-2dc05533-23a7-44b7-945d-5644b4892d88 service nova] [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 594.267600] env[62096]: DEBUG nova.scheduler.client.report [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 594.270338] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Expecting reply to msg 9ecc1a284d0b463193ff6525e294f037 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 594.276269] env[62096]: DEBUG nova.network.neutron [-] [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 594.276783] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 797d5fae14bd4f91886cd61ae5a55d90 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 594.284681] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 797d5fae14bd4f91886cd61ae5a55d90 [ 594.286417] env[62096]: DEBUG nova.network.neutron [req-76411d8f-8981-42e3-bef9-224a823a050f req-2dc05533-23a7-44b7-945d-5644b4892d88 service nova] [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 594.286907] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-76411d8f-8981-42e3-bef9-224a823a050f req-2dc05533-23a7-44b7-945d-5644b4892d88 service nova] Expecting reply to msg 80aa7e5a80dd4048a1788071c19ce80b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 594.292682] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9ecc1a284d0b463193ff6525e294f037 [ 594.297978] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 80aa7e5a80dd4048a1788071c19ce80b [ 594.486559] env[62096]: INFO nova.compute.manager [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] Took 0.58 seconds to detach 1 volumes for instance. 
[ 594.488757] env[62096]: DEBUG nova.compute.claims [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 594.488939] env[62096]: DEBUG oslo_concurrency.lockutils [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 594.528186] env[62096]: INFO nova.compute.manager [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] [instance: 3530e93e-f729-471b-976b-e52c3182cb15] Took 1.04 seconds to deallocate network for instance. [ 594.528186] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Expecting reply to msg 388797527f9f46519572bf245d7586b7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 594.571168] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 388797527f9f46519572bf245d7586b7 [ 594.774913] env[62096]: DEBUG oslo_concurrency.lockutils [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.593s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 594.774913] env[62096]: DEBUG nova.compute.manager [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] Start building networks asynchronously for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 594.775749] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Expecting reply to msg ba3e232f185a41ceab68210a1fe2b5d7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 594.776790] env[62096]: DEBUG oslo_concurrency.lockutils [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 18.198s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 594.778463] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Expecting reply to msg f50a6c0ae02f4938a32cb1b62fbf23e7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 594.779697] env[62096]: INFO nova.compute.manager [-] [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] Took 1.03 seconds to deallocate network for instance. 
[ 594.782513] env[62096]: DEBUG nova.compute.claims [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 594.782690] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 594.790554] env[62096]: DEBUG oslo_concurrency.lockutils [req-76411d8f-8981-42e3-bef9-224a823a050f req-2dc05533-23a7-44b7-945d-5644b4892d88 service nova] Releasing lock "refresh_cache-4fdcbe73-6d7e-4f89-b83d-e6b1664bd317" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 594.823456] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ba3e232f185a41ceab68210a1fe2b5d7 [ 594.830289] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f50a6c0ae02f4938a32cb1b62fbf23e7 [ 595.031869] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Expecting reply to msg 2776607a105a4d35b81956fac877a082 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 595.078321] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2776607a105a4d35b81956fac877a082 [ 595.286171] env[62096]: DEBUG nova.compute.utils [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 595.286171] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Expecting reply to msg 9777711bb710420eb82843721f8e37eb in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 595.287573] env[62096]: DEBUG nova.compute.manager [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 595.287893] env[62096]: DEBUG nova.network.neutron [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 595.292295] env[62096]: DEBUG nova.compute.manager [req-b80a63c5-6319-42ee-b3ac-20b40def5669 req-bacf408f-d874-48f6-9dfe-a809bae30b77 service nova] [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] Received event network-vif-deleted-2371b750-f7b3-4954-904e-f8f99fe6cc78 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 595.292975] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9777711bb710420eb82843721f8e37eb [ 595.372767] env[62096]: DEBUG nova.policy [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '220981d20a1a4f76a40389f3c9a3d761', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7b65a4cddd9a4c728d0ccbd6ce3e59b7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 595.569112] env[62096]: INFO nova.scheduler.client.report [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Deleted allocations for instance 3530e93e-f729-471b-976b-e52c3182cb15 [ 595.575219] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Expecting reply to msg 87c2fe49825546b99b73e093c1fb3e0b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 595.590234] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 87c2fe49825546b99b73e093c1fb3e0b [ 595.604910] env[62096]: DEBUG oslo_concurrency.lockutils [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Acquiring lock "4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 595.605144] env[62096]: DEBUG oslo_concurrency.lockutils [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Lock "4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 595.758028] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-800371f9-1177-48de-b384-183994bebcbe {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.766517] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8712756f-363b-42d3-82c6-754a20e2684e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.814446] env[62096]: DEBUG nova.compute.manager [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] Start building block device mappings for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 595.816562] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Expecting reply to msg 70da22fcbad3424f9b342e5c841ccdbc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 595.818684] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff595c58-b135-48c0-a1a7-afaabafb9299 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.827590] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19dd86ac-bdc4-4aa0-a813-02ee85b3c4e8 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.845905] env[62096]: DEBUG nova.compute.provider_tree [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 595.846257] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Expecting reply to msg 5439f1b783dd41efaaafc62f0c0d1e6c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 595.856153] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5439f1b783dd41efaaafc62f0c0d1e6c [ 595.862055] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 70da22fcbad3424f9b342e5c841ccdbc [ 596.049152] env[62096]: DEBUG nova.network.neutron [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] Successfully created port: 23cb598f-2b29-4f1e-ba72-c92aa3797f30 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 596.077322] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4d9c1482-ab74-4f69-8438-670cc92231c6 tempest-VolumesAssistedSnapshotsTest-1011581688 tempest-VolumesAssistedSnapshotsTest-1011581688-project-member] Lock "3530e93e-f729-471b-976b-e52c3182cb15" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.987s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 596.077984] env[62096]: INFO 
oslo_messaging._drivers.amqpdriver [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Expecting reply to msg 83d60ef329564061932315a6ab6e083b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 596.092951] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 83d60ef329564061932315a6ab6e083b [ 596.325347] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Expecting reply to msg b5b1b2a82e434db287b9e5afa33aed67 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 596.349191] env[62096]: DEBUG nova.scheduler.client.report [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 596.351520] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Expecting reply to msg 9c554baad1c04b4eaa960e221414c76a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 596.362672] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b5b1b2a82e434db287b9e5afa33aed67 [ 596.368171] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9c554baad1c04b4eaa960e221414c76a [ 596.580325] env[62096]: DEBUG nova.compute.manager [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 596.582082] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Expecting reply to msg d59d382827c64737a62a4c1e8cb9e73d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 596.623799] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d59d382827c64737a62a4c1e8cb9e73d [ 596.832029] env[62096]: DEBUG nova.compute.manager [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 596.856086] env[62096]: DEBUG nova.virt.hardware [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 596.856356] env[62096]: DEBUG nova.virt.hardware [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 596.856522] env[62096]: DEBUG nova.virt.hardware [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 596.856742] env[62096]: DEBUG nova.virt.hardware [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 596.856884] env[62096]: DEBUG nova.virt.hardware [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 596.856972] env[62096]: DEBUG nova.virt.hardware [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 596.857162] env[62096]: DEBUG nova.virt.hardware [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 596.857314] env[62096]: DEBUG nova.virt.hardware [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 596.857518] env[62096]: DEBUG nova.virt.hardware [None req-bff733c1-b59e-456d-a598-27648ab77c9f 
tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 596.857726] env[62096]: DEBUG nova.virt.hardware [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 596.857914] env[62096]: DEBUG nova.virt.hardware [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 596.858680] env[62096]: DEBUG oslo_concurrency.lockutils [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.082s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 596.859276] env[62096]: ERROR nova.compute.manager [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 3d2c2a0a-10a1-4b8d-b866-46b384c96647, please check neutron logs for more information. [ 596.859276] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] Traceback (most recent call last): [ 596.859276] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 596.859276] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] self.driver.spawn(context, instance, image_meta, [ 596.859276] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 596.859276] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] self._vmops.spawn(context, instance, image_meta, injected_files, [ 596.859276] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 596.859276] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] vm_ref = self.build_virtual_machine(instance, [ 596.859276] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 596.859276] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] vif_infos = vmwarevif.get_vif_info(self._session, [ 596.859276] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 596.859659] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] for vif in 
network_info: [ 596.859659] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 596.859659] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] return self._sync_wrapper(fn, *args, **kwargs) [ 596.859659] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 596.859659] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] self.wait() [ 596.859659] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 596.859659] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] self[:] = self._gt.wait() [ 596.859659] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 596.859659] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] return self._exit_event.wait() [ 596.859659] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 596.859659] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] result = hub.switch() [ 596.859659] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 596.859659] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] return self.greenlet.switch() [ 596.860132] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 596.860132] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] result = function(*args, **kwargs) [ 596.860132] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 596.860132] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] return func(*args, **kwargs) [ 596.860132] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 596.860132] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] raise e [ 596.860132] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 596.860132] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] nwinfo = self.network_api.allocate_for_instance( [ 596.860132] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 596.860132] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] created_port_ids = self._update_ports_for_instance( [ 596.860132] env[62096]: ERROR 
nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 596.860132] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] with excutils.save_and_reraise_exception(): [ 596.860132] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 596.860544] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] self.force_reraise() [ 596.860544] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 596.860544] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] raise self.value [ 596.860544] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 596.860544] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] updated_port = self._update_port( [ 596.860544] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 596.860544] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] _ensure_no_port_binding_failure(port) [ 596.860544] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 596.860544] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] raise exception.PortBindingFailed(port_id=port['id']) [ 596.860544] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] nova.exception.PortBindingFailed: Binding failed for port 3d2c2a0a-10a1-4b8d-b866-46b384c96647, please check neutron logs for more information. [ 596.860544] env[62096]: ERROR nova.compute.manager [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] [ 596.860901] env[62096]: DEBUG nova.compute.utils [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] Binding failed for port 3d2c2a0a-10a1-4b8d-b866-46b384c96647, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 596.861749] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f355c7c9-fc85-4dbe-b954-3b6b5d70ef91 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.868771] env[62096]: DEBUG nova.compute.manager [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] Build of instance 94fbc312-96fb-4b62-adc7-18053d509eca was re-scheduled: Binding failed for port 3d2c2a0a-10a1-4b8d-b866-46b384c96647, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 596.869183] env[62096]: DEBUG nova.compute.manager [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 596.869366] env[62096]: DEBUG oslo_concurrency.lockutils [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Acquiring lock "refresh_cache-94fbc312-96fb-4b62-adc7-18053d509eca" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 596.869483] env[62096]: DEBUG oslo_concurrency.lockutils [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Acquired lock "refresh_cache-94fbc312-96fb-4b62-adc7-18053d509eca" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 596.869634] env[62096]: DEBUG nova.network.neutron [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 596.870128] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Expecting reply to msg e54161dbe5a0462c99c88fcee73141cd in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 596.872523] env[62096]: DEBUG oslo_concurrency.lockutils [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.560s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 596.874491] env[62096]: INFO nova.compute.claims [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 596.876604] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Expecting reply to msg c3e97ba644b646c68cbde33a91d0587b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 596.890938] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e54161dbe5a0462c99c88fcee73141cd [ 596.895784] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed0de003-e173-4122-a5e4-f253f3af431d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.936046] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c3e97ba644b646c68cbde33a91d0587b [ 597.107614] env[62096]: DEBUG 
oslo_concurrency.lockutils [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 597.393113] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Expecting reply to msg b8b401999b924b3dad715e4a16c50223 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 597.397339] env[62096]: DEBUG nova.network.neutron [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 597.401606] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b8b401999b924b3dad715e4a16c50223 [ 597.574308] env[62096]: DEBUG nova.network.neutron [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 597.574833] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Expecting reply to msg e9485ce32a5f43b4867ede2580ebd1c4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 597.589484] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e9485ce32a5f43b4867ede2580ebd1c4 [ 597.671924] env[62096]: DEBUG nova.compute.manager [req-a4e6ccb8-0af6-4654-8532-e1d55af0531a req-72e661e3-6948-4ae7-aa8b-a4e2d5239025 service nova] [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] Received event network-changed-23cb598f-2b29-4f1e-ba72-c92aa3797f30 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 597.672176] env[62096]: DEBUG nova.compute.manager [req-a4e6ccb8-0af6-4654-8532-e1d55af0531a req-72e661e3-6948-4ae7-aa8b-a4e2d5239025 service nova] [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] Refreshing instance network info cache due to event network-changed-23cb598f-2b29-4f1e-ba72-c92aa3797f30. 
{{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 597.672397] env[62096]: DEBUG oslo_concurrency.lockutils [req-a4e6ccb8-0af6-4654-8532-e1d55af0531a req-72e661e3-6948-4ae7-aa8b-a4e2d5239025 service nova] Acquiring lock "refresh_cache-6ac3df26-83ab-4519-a1e2-51286c1d1991" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 597.672539] env[62096]: DEBUG oslo_concurrency.lockutils [req-a4e6ccb8-0af6-4654-8532-e1d55af0531a req-72e661e3-6948-4ae7-aa8b-a4e2d5239025 service nova] Acquired lock "refresh_cache-6ac3df26-83ab-4519-a1e2-51286c1d1991" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 597.672696] env[62096]: DEBUG nova.network.neutron [req-a4e6ccb8-0af6-4654-8532-e1d55af0531a req-72e661e3-6948-4ae7-aa8b-a4e2d5239025 service nova] [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] Refreshing network info cache for port 23cb598f-2b29-4f1e-ba72-c92aa3797f30 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 597.673120] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-a4e6ccb8-0af6-4654-8532-e1d55af0531a req-72e661e3-6948-4ae7-aa8b-a4e2d5239025 service nova] Expecting reply to msg e9c785937bad4a3885e0714760b58ed5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 597.686245] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e9c785937bad4a3885e0714760b58ed5 [ 597.870140] env[62096]: ERROR nova.compute.manager [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 23cb598f-2b29-4f1e-ba72-c92aa3797f30, please check neutron logs for more information. 
[ 597.870140] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 597.870140] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 597.870140] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 597.870140] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 597.870140] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 597.870140] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 597.870140] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 597.870140] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 597.870140] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 597.870140] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 597.870140] env[62096]: ERROR nova.compute.manager raise self.value [ 597.870140] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 597.870140] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 597.870140] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 597.870140] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 597.870752] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 597.870752] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 597.870752] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 23cb598f-2b29-4f1e-ba72-c92aa3797f30, please check neutron logs for more information. 
[ 597.870752] env[62096]: ERROR nova.compute.manager [ 597.870752] env[62096]: Traceback (most recent call last): [ 597.870752] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 597.870752] env[62096]: listener.cb(fileno) [ 597.870752] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 597.870752] env[62096]: result = function(*args, **kwargs) [ 597.870752] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 597.870752] env[62096]: return func(*args, **kwargs) [ 597.870752] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 597.870752] env[62096]: raise e [ 597.870752] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 597.870752] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 597.870752] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 597.870752] env[62096]: created_port_ids = self._update_ports_for_instance( [ 597.870752] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 597.870752] env[62096]: with excutils.save_and_reraise_exception(): [ 597.870752] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 597.870752] env[62096]: self.force_reraise() [ 597.870752] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 597.870752] env[62096]: raise self.value [ 597.870752] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 597.870752] env[62096]: updated_port = self._update_port( [ 597.870752] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 597.870752] env[62096]: _ensure_no_port_binding_failure(port) [ 597.870752] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 597.870752] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 597.871587] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 23cb598f-2b29-4f1e-ba72-c92aa3797f30, please check neutron logs for more information. [ 597.871587] env[62096]: Removing descriptor: 14 [ 597.871587] env[62096]: ERROR nova.compute.manager [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 23cb598f-2b29-4f1e-ba72-c92aa3797f30, please check neutron logs for more information. 
[ 597.871587] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] Traceback (most recent call last): [ 597.871587] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 597.871587] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] yield resources [ 597.871587] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 597.871587] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] self.driver.spawn(context, instance, image_meta, [ 597.871587] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 597.871587] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] self._vmops.spawn(context, instance, image_meta, injected_files, [ 597.871587] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 597.871587] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] vm_ref = self.build_virtual_machine(instance, [ 597.871951] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 597.871951] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] vif_infos = vmwarevif.get_vif_info(self._session, [ 597.871951] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 597.871951] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] for vif in network_info: [ 597.871951] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 597.871951] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] return self._sync_wrapper(fn, *args, **kwargs) [ 597.871951] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 597.871951] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] self.wait() [ 597.871951] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 597.871951] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] self[:] = self._gt.wait() [ 597.871951] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 597.871951] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] return self._exit_event.wait() [ 597.871951] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 597.872350] env[62096]: ERROR 
nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] result = hub.switch() [ 597.872350] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 597.872350] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] return self.greenlet.switch() [ 597.872350] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 597.872350] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] result = function(*args, **kwargs) [ 597.872350] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 597.872350] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] return func(*args, **kwargs) [ 597.872350] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 597.872350] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] raise e [ 597.872350] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 597.872350] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] nwinfo = self.network_api.allocate_for_instance( [ 597.872350] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 597.872350] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] created_port_ids = self._update_ports_for_instance( [ 597.872730] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 597.872730] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] with excutils.save_and_reraise_exception(): [ 597.872730] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 597.872730] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] self.force_reraise() [ 597.872730] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 597.872730] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] raise self.value [ 597.872730] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 597.872730] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] updated_port = self._update_port( [ 597.872730] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 597.872730] 
env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] _ensure_no_port_binding_failure(port) [ 597.872730] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 597.872730] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] raise exception.PortBindingFailed(port_id=port['id']) [ 597.873101] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] nova.exception.PortBindingFailed: Binding failed for port 23cb598f-2b29-4f1e-ba72-c92aa3797f30, please check neutron logs for more information. [ 597.873101] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] [ 597.873101] env[62096]: INFO nova.compute.manager [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] Terminating instance [ 597.873511] env[62096]: DEBUG oslo_concurrency.lockutils [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Acquiring lock "refresh_cache-6ac3df26-83ab-4519-a1e2-51286c1d1991" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 598.077077] env[62096]: DEBUG oslo_concurrency.lockutils [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Releasing lock "refresh_cache-94fbc312-96fb-4b62-adc7-18053d509eca" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 598.077331] env[62096]: DEBUG nova.compute.manager [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 598.077523] env[62096]: DEBUG nova.compute.manager [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 598.077690] env[62096]: DEBUG nova.network.neutron [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 598.108845] env[62096]: DEBUG nova.network.neutron [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 598.109458] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Expecting reply to msg 7fb3e2af21534e98b05ed6b22282f3c7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 598.116568] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7fb3e2af21534e98b05ed6b22282f3c7 [ 598.208332] env[62096]: DEBUG nova.network.neutron [req-a4e6ccb8-0af6-4654-8532-e1d55af0531a req-72e661e3-6948-4ae7-aa8b-a4e2d5239025 service nova] [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 598.308486] env[62096]: DEBUG nova.network.neutron [req-a4e6ccb8-0af6-4654-8532-e1d55af0531a req-72e661e3-6948-4ae7-aa8b-a4e2d5239025 service nova] [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 598.313058] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-a4e6ccb8-0af6-4654-8532-e1d55af0531a req-72e661e3-6948-4ae7-aa8b-a4e2d5239025 service nova] Expecting reply to msg 252d1a9ffba944fb9dae0682f6f0c69c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 598.325837] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 252d1a9ffba944fb9dae0682f6f0c69c [ 598.440114] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62b29b7b-6970-4975-89c7-b10bb49be720 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.449611] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0c3c360-05a9-4d4c-8e60-fc0b7d98b7a1 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.493383] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81dfc6ea-3b40-4d07-b0c4-0022cdbe6921 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.503372] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-701fe3f1-fda0-4902-ac14-f649de573f2a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.520513] env[62096]: DEBUG nova.compute.provider_tree [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 598.521294] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Expecting reply to msg 8af93292a5604c9e9d9026ff9f9905f6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 598.528550] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8af93292a5604c9e9d9026ff9f9905f6 [ 598.611503] 
env[62096]: DEBUG nova.network.neutron [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 598.612117] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Expecting reply to msg f00ab7af92d9443280b4f02bc836e3af in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 598.623134] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f00ab7af92d9443280b4f02bc836e3af [ 598.814405] env[62096]: DEBUG oslo_concurrency.lockutils [req-a4e6ccb8-0af6-4654-8532-e1d55af0531a req-72e661e3-6948-4ae7-aa8b-a4e2d5239025 service nova] Releasing lock "refresh_cache-6ac3df26-83ab-4519-a1e2-51286c1d1991" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 598.814746] env[62096]: DEBUG oslo_concurrency.lockutils [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Acquired lock "refresh_cache-6ac3df26-83ab-4519-a1e2-51286c1d1991" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 598.814933] env[62096]: DEBUG nova.network.neutron [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 598.815380] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Expecting reply to msg 984312f1d41b4b0ea0dba0c080e9c71a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 598.822260] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 984312f1d41b4b0ea0dba0c080e9c71a [ 599.024634] env[62096]: DEBUG nova.scheduler.client.report [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 599.027197] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Expecting reply to msg 57d0db4aa4304d91a8ee581ec444c571 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 599.040312] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 57d0db4aa4304d91a8ee581ec444c571 
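The inventory payload logged above for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 uses the standard Placement resource-class fields, where schedulable capacity per class is (total - reserved) * allocation_ratio. Below is a minimal sketch of that arithmetic with the exact figures from the log; the effective_capacity helper is illustrative only, not Nova or Placement code.

    # Illustrative only: capacity arithmetic for the inventory dict reported
    # by the resource tracker above. (total - reserved) * allocation_ratio is
    # the Placement capacity rule; the helper name is local to this sketch.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126,
                    'step_size': 1, 'allocation_ratio': 1.0},
    }

    def effective_capacity(inv):
        """Return {resource class: schedulable amount} for one provider."""
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}

    print(effective_capacity(inventory))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}

The repeated "Inventory has not changed" messages indicate this dict compared equal to what was last recorded for the provider, so the report client does not need to resend it to Placement.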
[ 599.114716] env[62096]: INFO nova.compute.manager [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 94fbc312-96fb-4b62-adc7-18053d509eca] Took 1.04 seconds to deallocate network for instance. [ 599.116595] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Expecting reply to msg 68b9bb5f7972413fb2e5a96465d6e0db in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 599.177298] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 68b9bb5f7972413fb2e5a96465d6e0db [ 599.306847] env[62096]: DEBUG oslo_concurrency.lockutils [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Acquiring lock "03570c3d-3ca5-495d-8a52-2f86b280f667" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 599.307110] env[62096]: DEBUG oslo_concurrency.lockutils [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Lock "03570c3d-3ca5-495d-8a52-2f86b280f667" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 599.343499] env[62096]: DEBUG nova.network.neutron [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 599.403925] env[62096]: DEBUG nova.network.neutron [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 599.403925] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Expecting reply to msg 5fe3476f354f4c9180732bfdecfc726b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 599.415702] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5fe3476f354f4c9180732bfdecfc726b [ 599.530250] env[62096]: DEBUG oslo_concurrency.lockutils [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.659s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 599.530826] env[62096]: DEBUG nova.compute.manager [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] Start building networks asynchronously for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 599.532734] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Expecting reply to msg 49a1b3f00cdb4afe8969d1eb716cd0cd in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 599.533748] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.581s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 599.535445] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Expecting reply to msg 1fda636135f143eebd52547876ecd36a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 599.578927] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 49a1b3f00cdb4afe8969d1eb716cd0cd [ 599.581206] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1fda636135f143eebd52547876ecd36a [ 599.621142] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Expecting reply to msg fa149c9dd07649719f438ac7b1f81a91 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 599.656130] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fa149c9dd07649719f438ac7b1f81a91 [ 599.905612] 
env[62096]: DEBUG oslo_concurrency.lockutils [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Releasing lock "refresh_cache-6ac3df26-83ab-4519-a1e2-51286c1d1991" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 599.906043] env[62096]: DEBUG nova.compute.manager [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 599.906239] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 599.906537] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-79f2f5ed-1661-47a6-be40-81438b8442dd {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.916431] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Acquiring lock "d38be540-9cd0-428a-b10d-313d2d464b25" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 599.916513] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Lock "d38be540-9cd0-428a-b10d-313d2d464b25" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 599.919911] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c3a7927-93bf-4434-97c0-66b0617ab0f7 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.950459] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6ac3df26-83ab-4519-a1e2-51286c1d1991 could not be found. 
[ 599.950682] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 599.950950] env[62096]: INFO nova.compute.manager [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] Took 0.04 seconds to destroy the instance on the hypervisor. [ 599.951166] env[62096]: DEBUG oslo.service.loopingcall [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 599.951358] env[62096]: DEBUG nova.compute.manager [-] [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 599.951716] env[62096]: DEBUG nova.network.neutron [-] [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 599.979601] env[62096]: DEBUG nova.network.neutron [-] [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 599.980160] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 732c1eadd8524accbeb6986bd7f37f9b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 599.989164] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 732c1eadd8524accbeb6986bd7f37f9b [ 600.043903] env[62096]: DEBUG nova.compute.utils [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 600.043903] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Expecting reply to msg a66bcfbdf03e47b2a97f6cb622769ed6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 600.044548] env[62096]: DEBUG nova.compute.manager [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 600.044810] env[62096]: DEBUG nova.network.neutron [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 600.056665] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a66bcfbdf03e47b2a97f6cb622769ed6 [ 600.097861] env[62096]: DEBUG nova.policy [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '31140eb3c85f4ad9a37461a052bf1c0b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '52cd4fa24de5421eb019ce25d3cb3c79', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 600.144427] env[62096]: INFO nova.scheduler.client.report [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Deleted allocations for instance 94fbc312-96fb-4b62-adc7-18053d509eca [ 600.155073] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Expecting reply to msg ece2ce7bf26642a1a6d2bfdfc34e0620 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 600.170157] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ece2ce7bf26642a1a6d2bfdfc34e0620 [ 600.425363] env[62096]: DEBUG nova.network.neutron [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] Successfully created port: e88b5018-a026-4377-aaa2-562d1c2db094 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 600.486761] env[62096]: DEBUG nova.network.neutron [-] [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 600.486761] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 247313dfd4d340dcbdbc5d37174fcbef in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 600.495168] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 247313dfd4d340dcbdbc5d37174fcbef [ 600.510801] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7602a714-0a9b-462b-9af4-2d78824fbd7f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.519535] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-598314dd-757a-4b76-a6ae-2d2ed5f92165 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
600.551742] env[62096]: DEBUG nova.compute.manager [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] Start building block device mappings for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 600.553628] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Expecting reply to msg 499baf23b18143cb8e15f2a819aaf543 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 600.555512] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e116097c-38ba-4d3f-992d-3453f1f5e41a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.566922] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-455bec1e-a828-4ae0-8a97-53ae78350e33 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.582181] env[62096]: DEBUG nova.compute.provider_tree [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 600.582748] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Expecting reply to msg de554d227ca74f61b2fb9680fcde6c4c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 600.589812] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 499baf23b18143cb8e15f2a819aaf543 [ 600.591340] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg de554d227ca74f61b2fb9680fcde6c4c [ 600.657759] env[62096]: DEBUG oslo_concurrency.lockutils [None req-298b76a9-0fa7-45b3-84c9-138485632e29 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Lock "94fbc312-96fb-4b62-adc7-18053d509eca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.890s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 600.658739] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 907dc1a329d24227baa87c509994be53 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 600.682097] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 907dc1a329d24227baa87c509994be53 [ 600.692768] env[62096]: DEBUG nova.compute.manager [req-d4fae258-63dc-4bfe-9695-f9ff71a18d58 req-affa0ada-a579-489c-a1ec-1f2c34a3cde0 service nova] [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] Received event network-vif-deleted-23cb598f-2b29-4f1e-ba72-c92aa3797f30 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 600.988531] env[62096]: INFO nova.compute.manager [-] 
[instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] Took 1.04 seconds to deallocate network for instance. [ 600.991048] env[62096]: DEBUG nova.compute.claims [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 600.991224] env[62096]: DEBUG oslo_concurrency.lockutils [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 601.061453] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Expecting reply to msg 6cd7a51d7e2f495784b471e1ecc6d150 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 601.085988] env[62096]: DEBUG nova.scheduler.client.report [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 601.088391] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Expecting reply to msg 3c64338b85e644beb019a065f40b09ee in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 601.101650] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6cd7a51d7e2f495784b471e1ecc6d150 [ 601.106545] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c64338b85e644beb019a065f40b09ee [ 601.164235] env[62096]: DEBUG nova.compute.manager [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] Starting instance... 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 601.164235] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 6cd03add05df4e879927316277ddbafc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 601.203024] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6cd03add05df4e879927316277ddbafc [ 601.311779] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Acquiring lock "58df043b-ab2f-4e78-8bba-084fe53d3d8e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 601.312453] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Lock "58df043b-ab2f-4e78-8bba-084fe53d3d8e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 601.565168] env[62096]: DEBUG nova.compute.manager [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 601.591741] env[62096]: DEBUG nova.virt.hardware [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 601.592064] env[62096]: DEBUG nova.virt.hardware [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 601.592229] env[62096]: DEBUG nova.virt.hardware [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 601.592406] env[62096]: DEBUG nova.virt.hardware [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 601.592543] env[62096]: DEBUG nova.virt.hardware [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 601.592681] env[62096]: DEBUG nova.virt.hardware [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 601.592883] env[62096]: DEBUG nova.virt.hardware [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 601.593031] env[62096]: DEBUG nova.virt.hardware [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 601.593656] 
env[62096]: DEBUG nova.virt.hardware [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 601.593911] env[62096]: DEBUG nova.virt.hardware [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 601.594112] env[62096]: DEBUG nova.virt.hardware [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 601.594844] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.061s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 601.595441] env[62096]: ERROR nova.compute.manager [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b07a03a0-8320-49d6-93ef-3631d9a04a2a, please check neutron logs for more information. 
[ 601.595441] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] Traceback (most recent call last): [ 601.595441] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 601.595441] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] self.driver.spawn(context, instance, image_meta, [ 601.595441] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 601.595441] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 601.595441] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 601.595441] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] vm_ref = self.build_virtual_machine(instance, [ 601.595441] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 601.595441] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] vif_infos = vmwarevif.get_vif_info(self._session, [ 601.595441] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 601.595722] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] for vif in network_info: [ 601.595722] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 601.595722] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] return self._sync_wrapper(fn, *args, **kwargs) [ 601.595722] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 601.595722] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] self.wait() [ 601.595722] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 601.595722] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] self[:] = self._gt.wait() [ 601.595722] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 601.595722] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] return self._exit_event.wait() [ 601.595722] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 601.595722] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] result = hub.switch() [ 601.595722] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
601.595722] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] return self.greenlet.switch() [ 601.595981] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 601.595981] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] result = function(*args, **kwargs) [ 601.595981] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 601.595981] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] return func(*args, **kwargs) [ 601.595981] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 601.595981] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] raise e [ 601.595981] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 601.595981] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] nwinfo = self.network_api.allocate_for_instance( [ 601.595981] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 601.595981] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] created_port_ids = self._update_ports_for_instance( [ 601.595981] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 601.595981] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] with excutils.save_and_reraise_exception(): [ 601.595981] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 601.596305] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] self.force_reraise() [ 601.596305] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 601.596305] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] raise self.value [ 601.596305] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 601.596305] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] updated_port = self._update_port( [ 601.596305] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 601.596305] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] _ensure_no_port_binding_failure(port) [ 601.596305] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 601.596305] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] raise exception.PortBindingFailed(port_id=port['id']) [ 601.596305] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] nova.exception.PortBindingFailed: Binding failed for port b07a03a0-8320-49d6-93ef-3631d9a04a2a, please check neutron logs for more information. [ 601.596305] env[62096]: ERROR nova.compute.manager [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] [ 601.596540] env[62096]: DEBUG nova.compute.utils [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] Binding failed for port b07a03a0-8320-49d6-93ef-3631d9a04a2a, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 601.598040] env[62096]: DEBUG nova.compute.manager [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] Build of instance fcdeb1ed-7f21-4338-8964-63d16d275bc2 was re-scheduled: Binding failed for port b07a03a0-8320-49d6-93ef-3631d9a04a2a, please check neutron logs for more information. {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 601.598477] env[62096]: DEBUG nova.compute.manager [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 601.598694] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Acquiring lock "refresh_cache-fcdeb1ed-7f21-4338-8964-63d16d275bc2" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 601.598842] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Acquired lock "refresh_cache-fcdeb1ed-7f21-4338-8964-63d16d275bc2" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 601.598995] env[62096]: DEBUG nova.network.neutron [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 601.599707] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Expecting reply to msg ae47ad4099c3489a8eaa8b0701621c4e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 601.601597] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16311682-fc35-461a-9989-0a6bf3c1f203 {{(pid=62096) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.604526] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.719s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 601.610463] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Expecting reply to msg 68094db66e894a1280cc13effcc3f3ca in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 601.616403] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ae47ad4099c3489a8eaa8b0701621c4e [ 601.620839] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0980c0dc-d464-485a-9a56-199b882d54ed {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.647946] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 68094db66e894a1280cc13effcc3f3ca [ 601.682658] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 602.088670] env[62096]: ERROR nova.compute.manager [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e88b5018-a026-4377-aaa2-562d1c2db094, please check neutron logs for more information. 
[ 602.088670] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 602.088670] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 602.088670] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 602.088670] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 602.088670] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 602.088670] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 602.088670] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 602.088670] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 602.088670] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 602.088670] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 602.088670] env[62096]: ERROR nova.compute.manager raise self.value [ 602.088670] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 602.088670] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 602.088670] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 602.088670] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 602.089129] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 602.089129] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 602.089129] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e88b5018-a026-4377-aaa2-562d1c2db094, please check neutron logs for more information. 
[ 602.089129] env[62096]: ERROR nova.compute.manager [ 602.089129] env[62096]: Traceback (most recent call last): [ 602.089129] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 602.089129] env[62096]: listener.cb(fileno) [ 602.089129] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 602.089129] env[62096]: result = function(*args, **kwargs) [ 602.089129] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 602.089129] env[62096]: return func(*args, **kwargs) [ 602.089129] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 602.089129] env[62096]: raise e [ 602.089129] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 602.089129] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 602.089129] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 602.089129] env[62096]: created_port_ids = self._update_ports_for_instance( [ 602.089129] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 602.089129] env[62096]: with excutils.save_and_reraise_exception(): [ 602.089129] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 602.089129] env[62096]: self.force_reraise() [ 602.089129] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 602.089129] env[62096]: raise self.value [ 602.089129] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 602.089129] env[62096]: updated_port = self._update_port( [ 602.089129] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 602.089129] env[62096]: _ensure_no_port_binding_failure(port) [ 602.089129] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 602.089129] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 602.089844] env[62096]: nova.exception.PortBindingFailed: Binding failed for port e88b5018-a026-4377-aaa2-562d1c2db094, please check neutron logs for more information. [ 602.089844] env[62096]: Removing descriptor: 14 [ 602.089844] env[62096]: ERROR nova.compute.manager [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e88b5018-a026-4377-aaa2-562d1c2db094, please check neutron logs for more information. 
[ 602.089844] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] Traceback (most recent call last): [ 602.089844] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 602.089844] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] yield resources [ 602.089844] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 602.089844] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] self.driver.spawn(context, instance, image_meta, [ 602.089844] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 602.089844] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 602.089844] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 602.089844] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] vm_ref = self.build_virtual_machine(instance, [ 602.090147] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 602.090147] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] vif_infos = vmwarevif.get_vif_info(self._session, [ 602.090147] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 602.090147] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] for vif in network_info: [ 602.090147] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 602.090147] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] return self._sync_wrapper(fn, *args, **kwargs) [ 602.090147] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 602.090147] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] self.wait() [ 602.090147] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 602.090147] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] self[:] = self._gt.wait() [ 602.090147] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 602.090147] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] return self._exit_event.wait() [ 602.090147] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 602.090466] env[62096]: ERROR 
nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] result = hub.switch() [ 602.090466] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 602.090466] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] return self.greenlet.switch() [ 602.090466] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 602.090466] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] result = function(*args, **kwargs) [ 602.090466] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 602.090466] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] return func(*args, **kwargs) [ 602.090466] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 602.090466] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] raise e [ 602.090466] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 602.090466] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] nwinfo = self.network_api.allocate_for_instance( [ 602.090466] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 602.090466] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] created_port_ids = self._update_ports_for_instance( [ 602.090787] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 602.090787] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] with excutils.save_and_reraise_exception(): [ 602.090787] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 602.090787] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] self.force_reraise() [ 602.090787] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 602.090787] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] raise self.value [ 602.090787] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 602.090787] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] updated_port = self._update_port( [ 602.090787] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 602.090787] 
env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] _ensure_no_port_binding_failure(port) [ 602.090787] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 602.090787] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] raise exception.PortBindingFailed(port_id=port['id']) [ 602.091137] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] nova.exception.PortBindingFailed: Binding failed for port e88b5018-a026-4377-aaa2-562d1c2db094, please check neutron logs for more information. [ 602.091137] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] [ 602.091137] env[62096]: INFO nova.compute.manager [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] Terminating instance [ 602.091777] env[62096]: DEBUG oslo_concurrency.lockutils [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Acquiring lock "refresh_cache-3855c98d-4ade-4f6f-85aa-1297df5a39a9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 602.091929] env[62096]: DEBUG oslo_concurrency.lockutils [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Acquired lock "refresh_cache-3855c98d-4ade-4f6f-85aa-1297df5a39a9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.092104] env[62096]: DEBUG nova.network.neutron [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 602.092586] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Expecting reply to msg 1bc4b001e541463cb8ec35ebb2327d72 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 602.100803] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1bc4b001e541463cb8ec35ebb2327d72 [ 602.142160] env[62096]: DEBUG nova.network.neutron [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 602.295165] env[62096]: DEBUG nova.network.neutron [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 602.295677] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Expecting reply to msg b5d3a052cc02483c8b345f4109be1fe6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 602.305923] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b5d3a052cc02483c8b345f4109be1fe6 [ 602.593651] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f45788f8-efe6-471e-bbd2-7620955a9a12 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.616686] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89edcacb-e82f-453c-8fed-3682b55e8a23 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.643657] env[62096]: DEBUG nova.network.neutron [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 602.646410] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1642d7ae-b8aa-4a9f-bc33-e3b437452005 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.656789] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-480790e3-1c31-4b14-a65f-9c06d5ae9323 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.682664] env[62096]: DEBUG nova.compute.provider_tree [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 602.683211] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Expecting reply to msg 03c92310b5c148a1bcfdaabc2380b40e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 602.695184] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 03c92310b5c148a1bcfdaabc2380b40e [ 602.798327] env[62096]: DEBUG nova.network.neutron [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] Updating instance_info_cache with network_info: [] 
{{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 602.799110] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Expecting reply to msg 977d0844f6ab4cc6bbfa1639d26ed114 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 602.799919] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Releasing lock "refresh_cache-fcdeb1ed-7f21-4338-8964-63d16d275bc2" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 602.800153] env[62096]: DEBUG nova.compute.manager [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 602.800412] env[62096]: DEBUG nova.compute.manager [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 602.800481] env[62096]: DEBUG nova.network.neutron [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 602.807782] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 977d0844f6ab4cc6bbfa1639d26ed114 [ 602.819571] env[62096]: DEBUG nova.network.neutron [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 602.820135] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Expecting reply to msg 16e453ff3da645fa8b7f9a410bed85be in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 602.827637] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 16e453ff3da645fa8b7f9a410bed85be [ 603.055121] env[62096]: DEBUG nova.compute.manager [req-72dd3839-58a3-47c1-a025-4eeea21b081a req-cab8f6ad-d09d-4da7-9e7a-17e6b9eb348a service nova] [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] Received event network-changed-e88b5018-a026-4377-aaa2-562d1c2db094 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 603.055307] env[62096]: DEBUG nova.compute.manager [req-72dd3839-58a3-47c1-a025-4eeea21b081a req-cab8f6ad-d09d-4da7-9e7a-17e6b9eb348a service nova] [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] Refreshing instance network info cache due to event network-changed-e88b5018-a026-4377-aaa2-562d1c2db094. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 603.055490] env[62096]: DEBUG oslo_concurrency.lockutils [req-72dd3839-58a3-47c1-a025-4eeea21b081a req-cab8f6ad-d09d-4da7-9e7a-17e6b9eb348a service nova] Acquiring lock "refresh_cache-3855c98d-4ade-4f6f-85aa-1297df5a39a9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 603.195889] env[62096]: DEBUG nova.scheduler.client.report [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 603.198347] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Expecting reply to msg ea4dc3405464480f9a8a8516967b0578 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 603.212227] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ea4dc3405464480f9a8a8516967b0578 [ 603.303955] env[62096]: DEBUG oslo_concurrency.lockutils [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Releasing lock "refresh_cache-3855c98d-4ade-4f6f-85aa-1297df5a39a9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 603.304429] env[62096]: DEBUG nova.compute.manager [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 603.304682] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 603.304923] env[62096]: DEBUG oslo_concurrency.lockutils [req-72dd3839-58a3-47c1-a025-4eeea21b081a req-cab8f6ad-d09d-4da7-9e7a-17e6b9eb348a service nova] Acquired lock "refresh_cache-3855c98d-4ade-4f6f-85aa-1297df5a39a9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 603.305084] env[62096]: DEBUG nova.network.neutron [req-72dd3839-58a3-47c1-a025-4eeea21b081a req-cab8f6ad-d09d-4da7-9e7a-17e6b9eb348a service nova] [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] Refreshing network info cache for port e88b5018-a026-4377-aaa2-562d1c2db094 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 603.305504] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-72dd3839-58a3-47c1-a025-4eeea21b081a req-cab8f6ad-d09d-4da7-9e7a-17e6b9eb348a service nova] Expecting reply to msg 885f90069d8f4ceca0524231526ce49e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 603.306330] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-efed4faa-ea15-424d-ac3a-72a5dde7403d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.313129] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 885f90069d8f4ceca0524231526ce49e [ 603.317796] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9b00914-9a48-43f4-81ee-d1317394d5c2 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.335876] env[62096]: DEBUG nova.network.neutron [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 603.336725] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Expecting reply to msg 518ee1fa0a5046979f5adb73e73679fa in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 603.345598] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 518ee1fa0a5046979f5adb73e73679fa [ 603.352485] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3855c98d-4ade-4f6f-85aa-1297df5a39a9 could not be found. 
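The "refresh_cache-<uuid>" acquire/release pairs recorded above come from oslo.concurrency's named locks. A minimal sketch of the same pattern follows; the instance UUID is copied from the log, but the helper function and the cache-refresh body are illustrative placeholders, not Nova's code.

from oslo_concurrency import lockutils

INSTANCE_UUID = "3855c98d-4ade-4f6f-85aa-1297df5a39a9"  # uuid taken from the log above

def refresh_instance_cache(instance_uuid):
    # lockutils.lock() is a context manager; with the default external=False it is an
    # in-process semaphore, which is what the "Acquiring lock ... / Lock ... acquired ::
    # waited N s / ... released :: held N s" DEBUG lines above record.
    with lockutils.lock("refresh_cache-%s" % instance_uuid):
        # ... rebuild the instance's network info cache here (illustrative placeholder) ...
        pass

refresh_instance_cache(INSTANCE_UUID)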
[ 603.352971] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 603.353288] env[62096]: INFO nova.compute.manager [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] Took 0.05 seconds to destroy the instance on the hypervisor. [ 603.353626] env[62096]: DEBUG oslo.service.loopingcall [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 603.353956] env[62096]: DEBUG nova.compute.manager [-] [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 603.354139] env[62096]: DEBUG nova.network.neutron [-] [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 603.379825] env[62096]: DEBUG nova.network.neutron [-] [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 603.379825] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 14debdb98aae469baee84c5055625d2b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 603.387042] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 14debdb98aae469baee84c5055625d2b [ 603.708167] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.098s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 603.708167] env[62096]: ERROR nova.compute.manager [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f73e7926-66c6-4049-9ef5-d36a5d8654a0, please check neutron logs for more information. 
[ 603.708167] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] Traceback (most recent call last): [ 603.708167] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 603.708167] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] self.driver.spawn(context, instance, image_meta, [ 603.708167] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 603.708167] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 603.708167] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 603.708167] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] vm_ref = self.build_virtual_machine(instance, [ 603.708791] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 603.708791] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] vif_infos = vmwarevif.get_vif_info(self._session, [ 603.708791] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 603.708791] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] for vif in network_info: [ 603.708791] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 603.708791] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] return self._sync_wrapper(fn, *args, **kwargs) [ 603.708791] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 603.708791] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] self.wait() [ 603.708791] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 603.708791] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] self[:] = self._gt.wait() [ 603.708791] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 603.708791] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] return self._exit_event.wait() [ 603.708791] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 603.709057] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] result = hub.switch() [ 603.709057] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
603.709057] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] return self.greenlet.switch() [ 603.709057] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 603.709057] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] result = function(*args, **kwargs) [ 603.709057] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 603.709057] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] return func(*args, **kwargs) [ 603.709057] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 603.709057] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] raise e [ 603.709057] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 603.709057] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] nwinfo = self.network_api.allocate_for_instance( [ 603.709057] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 603.709057] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] created_port_ids = self._update_ports_for_instance( [ 603.709313] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 603.709313] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] with excutils.save_and_reraise_exception(): [ 603.709313] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 603.709313] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] self.force_reraise() [ 603.709313] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 603.709313] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] raise self.value [ 603.709313] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 603.709313] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] updated_port = self._update_port( [ 603.709313] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 603.709313] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] _ensure_no_port_binding_failure(port) [ 603.709313] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 603.709313] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] raise exception.PortBindingFailed(port_id=port['id']) [ 603.709564] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] nova.exception.PortBindingFailed: Binding failed for port f73e7926-66c6-4049-9ef5-d36a5d8654a0, please check neutron logs for more information. [ 603.709564] env[62096]: ERROR nova.compute.manager [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] [ 603.709564] env[62096]: DEBUG nova.compute.utils [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] Binding failed for port f73e7926-66c6-4049-9ef5-d36a5d8654a0, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 603.709564] env[62096]: DEBUG oslo_concurrency.lockutils [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.511s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 603.709564] env[62096]: INFO nova.compute.claims [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] [instance: 9bc49e7a-af22-4112-8609-348605599692] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 603.709564] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Expecting reply to msg 3c309feeec4546279644115b9b80f76c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 603.711236] env[62096]: DEBUG nova.compute.manager [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] Build of instance 1377dbd7-9025-4683-be56-6e8987a5d72e was re-scheduled: Binding failed for port f73e7926-66c6-4049-9ef5-d36a5d8654a0, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 603.711935] env[62096]: DEBUG nova.compute.manager [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 603.712383] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Acquiring lock "refresh_cache-1377dbd7-9025-4683-be56-6e8987a5d72e" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 603.712659] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Acquired lock "refresh_cache-1377dbd7-9025-4683-be56-6e8987a5d72e" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 603.712941] env[62096]: DEBUG nova.network.neutron [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 603.713420] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Expecting reply to msg 673c2256e1db433e952d41ede9db049a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 603.720285] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 673c2256e1db433e952d41ede9db049a [ 603.744708] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c309feeec4546279644115b9b80f76c [ 603.829518] env[62096]: DEBUG nova.network.neutron [req-72dd3839-58a3-47c1-a025-4eeea21b081a req-cab8f6ad-d09d-4da7-9e7a-17e6b9eb348a service nova] [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 603.839126] env[62096]: INFO nova.compute.manager [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: fcdeb1ed-7f21-4338-8964-63d16d275bc2] Took 1.04 seconds to deallocate network for instance. 
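The traceback above bottoms out in _ensure_no_port_binding_failure at nova/network/neutron.py line 294, which raises PortBindingFailed for port f73e7926-66c6-4049-9ef5-d36a5d8654a0. Only that raise is confirmed by the log; the sketch below reconstructs the surrounding guard from memory, and the binding:vif_type comparison is an assumption about how the failed binding is detected.

from nova import exception

VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed value Neutron reports for a failed binding

def _ensure_no_port_binding_failure(port):
    # Only the raise below is confirmed by the traceback (neutron.py:294); the
    # binding:vif_type check is an assumption, not copied from this log.
    binding_vif_type = port.get('binding:vif_type')
    if binding_vif_type == VIF_TYPE_BINDING_FAILED:
        raise exception.PortBindingFailed(port_id=port['id'])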
[ 603.840812] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Expecting reply to msg 3523d3b1d2c94360be5f046f4b99fe3a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 603.881361] env[62096]: DEBUG nova.network.neutron [-] [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 603.881959] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 7fe9516375514a629b9b8b529d95379a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 603.897271] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7fe9516375514a629b9b8b529d95379a [ 603.897904] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3523d3b1d2c94360be5f046f4b99fe3a [ 603.997360] env[62096]: DEBUG nova.network.neutron [req-72dd3839-58a3-47c1-a025-4eeea21b081a req-cab8f6ad-d09d-4da7-9e7a-17e6b9eb348a service nova] [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 603.997984] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-72dd3839-58a3-47c1-a025-4eeea21b081a req-cab8f6ad-d09d-4da7-9e7a-17e6b9eb348a service nova] Expecting reply to msg 503f479c68534b41bc2d16fda188f277 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 604.011509] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 503f479c68534b41bc2d16fda188f277 [ 604.226943] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Expecting reply to msg 299580d9f35549429aa42482c3d09303 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 604.232577] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 299580d9f35549429aa42482c3d09303 [ 604.244207] env[62096]: DEBUG nova.network.neutron [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 604.336630] env[62096]: DEBUG nova.network.neutron [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 604.336630] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Expecting reply to msg 3d1d3e8886fe45f4b1d704e169292bc2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 604.345358] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Expecting reply to msg f4139e18d01c4154b17a9cec653f4982 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 604.350385] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3d1d3e8886fe45f4b1d704e169292bc2 [ 604.384144] env[62096]: INFO nova.compute.manager [-] [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] Took 1.03 seconds to deallocate network for instance. [ 604.386810] env[62096]: DEBUG nova.compute.claims [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 604.386983] env[62096]: DEBUG oslo_concurrency.lockutils [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 604.391755] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f4139e18d01c4154b17a9cec653f4982 [ 604.501009] env[62096]: DEBUG oslo_concurrency.lockutils [req-72dd3839-58a3-47c1-a025-4eeea21b081a req-cab8f6ad-d09d-4da7-9e7a-17e6b9eb348a service nova] Releasing lock "refresh_cache-3855c98d-4ade-4f6f-85aa-1297df5a39a9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 604.501264] env[62096]: DEBUG nova.compute.manager [req-72dd3839-58a3-47c1-a025-4eeea21b081a req-cab8f6ad-d09d-4da7-9e7a-17e6b9eb348a service nova] [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] Received event network-vif-deleted-e88b5018-a026-4377-aaa2-562d1c2db094 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 604.837981] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Releasing lock "refresh_cache-1377dbd7-9025-4683-be56-6e8987a5d72e" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 604.838230] env[62096]: DEBUG nova.compute.manager [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 
tempest-ListImageFiltersTestJSON-1308210824-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 604.838385] env[62096]: DEBUG nova.compute.manager [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 604.839112] env[62096]: DEBUG nova.network.neutron [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 604.872659] env[62096]: INFO nova.scheduler.client.report [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Deleted allocations for instance fcdeb1ed-7f21-4338-8964-63d16d275bc2 [ 604.878708] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Expecting reply to msg fb910eeabeac4baa8996e8284f18b8f8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 604.886808] env[62096]: DEBUG nova.network.neutron [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 604.887314] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Expecting reply to msg 00cfe9fcd37a408183e0f41b3fadb0b8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 604.903563] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fb910eeabeac4baa8996e8284f18b8f8 [ 604.906575] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 00cfe9fcd37a408183e0f41b3fadb0b8 [ 605.255351] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f72a7f49-179a-4546-bef5-195614a770d2 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.264232] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97cada6f-36b1-4252-af39-a5acfe967db0 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.294875] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02f7ec9e-01d2-4daa-b140-976099f022fa {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.302733] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-182539fb-382f-4ffb-83ea-922cb5c6fb6b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.317670] env[62096]: DEBUG nova.compute.provider_tree [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 605.323177] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Expecting reply to msg 428a6838e15347bfb44c5fa020f34569 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 605.327244] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 428a6838e15347bfb44c5fa020f34569 [ 605.380298] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d85fd46d-4576-455c-a571-1e0033669f66 tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Lock "fcdeb1ed-7f21-4338-8964-63d16d275bc2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.974s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 605.381072] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Expecting reply to msg a9834ed34a614608a77ba1c62adcab6d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 605.391704] env[62096]: DEBUG nova.network.neutron [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 
tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 605.392243] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Expecting reply to msg 20148d4d0f524e45925edc35cd9b673d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 605.394276] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a9834ed34a614608a77ba1c62adcab6d [ 605.399831] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 20148d4d0f524e45925edc35cd9b673d [ 605.820807] env[62096]: DEBUG nova.scheduler.client.report [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 605.823329] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Expecting reply to msg 738473a28f38420a8f7d096f970dc97a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 605.837342] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 738473a28f38420a8f7d096f970dc97a [ 605.886538] env[62096]: DEBUG nova.compute.manager [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 605.888263] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Expecting reply to msg df5c20362d5f4c8ebb77764115c2737c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 605.894808] env[62096]: INFO nova.compute.manager [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] [instance: 1377dbd7-9025-4683-be56-6e8987a5d72e] Took 1.06 seconds to deallocate network for instance. 
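The "Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3" entries repeat the same three resource-class records. A small illustrative calculation of the capacity those numbers imply, assuming placement's usual (total - reserved) * allocation_ratio sizing rule (min_unit, max_unit and step_size are dropped here for brevity):

# Values copied from the inventory data logged above; the helper is illustrative.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def schedulable_capacity(inv):
    # capacity = (total - reserved) * allocation_ratio for each resource class
    return {rc: (rec['total'] - rec['reserved']) * rec['allocation_ratio']
            for rc, rec in inv.items()}

print(schedulable_capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}

With the 4.0 VCPU allocation ratio the provider advertises 192 schedulable VCPUs even though only 48 are reported as total, while memory and disk are not overcommitted.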
[ 605.896408] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Expecting reply to msg b183a2c591ea437ba76d74ee71c55745 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 605.946360] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b183a2c591ea437ba76d74ee71c55745 [ 605.946975] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg df5c20362d5f4c8ebb77764115c2737c [ 606.330068] env[62096]: DEBUG oslo_concurrency.lockutils [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.625s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 606.330615] env[62096]: DEBUG nova.compute.manager [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] [instance: 9bc49e7a-af22-4112-8609-348605599692] Start building networks asynchronously for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 606.332259] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Expecting reply to msg 2c117aaa2b504e37b560fe0e6f955fd7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 606.333329] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.881s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 606.335002] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Expecting reply to msg 1474f603ecfa49da88526250cc993e9a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 606.366153] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2c117aaa2b504e37b560fe0e6f955fd7 [ 606.374658] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1474f603ecfa49da88526250cc993e9a [ 606.403373] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Expecting reply to msg fdf1bd4c025a4a848dae065798b2a6ab in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 606.411466] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 606.440612] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for 
msg fdf1bd4c025a4a848dae065798b2a6ab [ 606.838052] env[62096]: DEBUG nova.compute.utils [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 606.839114] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Expecting reply to msg d40a37cad0c34a3bb52a5720c1186cdf in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 606.844190] env[62096]: DEBUG nova.compute.manager [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] [instance: 9bc49e7a-af22-4112-8609-348605599692] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 606.844427] env[62096]: DEBUG nova.network.neutron [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] [instance: 9bc49e7a-af22-4112-8609-348605599692] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 606.852029] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d40a37cad0c34a3bb52a5720c1186cdf [ 606.916718] env[62096]: DEBUG nova.policy [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '279c46ba9aa5487c80d51c5156a9e1cd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fb329e86f2f844d59f04acf92c0b9172', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 606.931796] env[62096]: INFO nova.scheduler.client.report [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Deleted allocations for instance 1377dbd7-9025-4683-be56-6e8987a5d72e [ 606.940542] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Expecting reply to msg 359ebc9ee2854935a5940ed01911a63e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 606.959186] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 359ebc9ee2854935a5940ed01911a63e [ 607.097841] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Acquiring lock "321ab95b-6221-4bab-b442-a90926098dae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 607.098325] env[62096]: DEBUG oslo_concurrency.lockutils [None 
req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Lock "321ab95b-6221-4bab-b442-a90926098dae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 607.344152] env[62096]: DEBUG nova.compute.manager [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] [instance: 9bc49e7a-af22-4112-8609-348605599692] Start building block device mappings for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 607.345876] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Expecting reply to msg 01857060c0734a069f0a67de77a9e3dd in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 607.348961] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1636ba57-00da-4363-8006-4c0831beca36 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.357520] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cb414ef-b17f-4026-b882-88be0180672e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.393713] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 01857060c0734a069f0a67de77a9e3dd [ 607.394707] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7dcac57-a327-4d09-a546-49d21d6d2b30 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.404129] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e938bbfb-f8a7-4352-8cd4-b46c88b7e41c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.422149] env[62096]: DEBUG nova.compute.provider_tree [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 607.422963] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Expecting reply to msg 27490f0ffceb4aa3b5a3a6b88f666dc9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 607.435555] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 27490f0ffceb4aa3b5a3a6b88f666dc9 [ 607.439935] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ba7630fb-4f65-44a7-974c-3e7e0c36eacf tempest-ListImageFiltersTestJSON-1308210824 tempest-ListImageFiltersTestJSON-1308210824-project-member] Lock "1377dbd7-9025-4683-be56-6e8987a5d72e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.867s {{(pid=62096) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 607.440754] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Expecting reply to msg 38d04f919abe49a3b81ed3cd6b458237 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 607.458132] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 38d04f919abe49a3b81ed3cd6b458237 [ 607.580039] env[62096]: DEBUG nova.network.neutron [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] [instance: 9bc49e7a-af22-4112-8609-348605599692] Successfully created port: e34eb22a-a84c-4ad4-be42-ea372c508428 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 607.851940] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Expecting reply to msg 69785a201c2e409cb080719255403e6f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 607.892061] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 69785a201c2e409cb080719255403e6f [ 607.932267] env[62096]: DEBUG nova.scheduler.client.report [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 607.932267] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Expecting reply to msg 66d136d9b4154da8b7decf9eb108f6d8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 607.943161] env[62096]: DEBUG nova.compute.manager [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] Starting instance... 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 607.944965] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Expecting reply to msg 3e6e60be5a75418cb45cc4f36a170a8c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 607.947434] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 66d136d9b4154da8b7decf9eb108f6d8 [ 607.978799] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e6e60be5a75418cb45cc4f36a170a8c [ 608.357304] env[62096]: DEBUG nova.compute.manager [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] [instance: 9bc49e7a-af22-4112-8609-348605599692] Start spawning the instance on the hypervisor. {{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 608.380154] env[62096]: DEBUG nova.virt.hardware [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 608.380611] env[62096]: DEBUG nova.virt.hardware [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 608.380929] env[62096]: DEBUG nova.virt.hardware [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 608.381291] env[62096]: DEBUG nova.virt.hardware [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 608.381589] env[62096]: DEBUG nova.virt.hardware [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 608.381999] env[62096]: DEBUG nova.virt.hardware [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Chose sockets=0, 
cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 608.382349] env[62096]: DEBUG nova.virt.hardware [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 608.382761] env[62096]: DEBUG nova.virt.hardware [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 608.383087] env[62096]: DEBUG nova.virt.hardware [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 608.383520] env[62096]: DEBUG nova.virt.hardware [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 608.383896] env[62096]: DEBUG nova.virt.hardware [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 608.385529] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fad20d5-c166-4684-bcd2-29733f33cada {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.401818] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2583e4f8-ed16-4965-9042-bde91c143aaa {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.435881] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.102s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 608.436743] env[62096]: ERROR nova.compute.manager [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 6385cb29-db63-485a-90be-0c7ea8677787, please check neutron logs for more information. 
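Before the second PortBindingFailed traceback below, the nova.virt.hardware lines walk the CPU topology search for the 1-vCPU m1.nano flavor and, with no flavor or image limits, find exactly one possible topology: 1 socket x 1 core x 1 thread. A toy enumeration that reproduces the same arithmetic (not Nova's implementation):

# Toy version of the topology enumeration logged above: factor the vCPU count into
# sockets x cores x threads under the (here effectively unlimited) maxima.
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    topologies.append((sockets, cores, threads))
    return topologies

print(possible_topologies(1))   # [(1, 1, 1)]  -- matches "Got 1 possible topologies"
print(possible_topologies(4))   # e.g. (1, 2, 2), (2, 2, 1), (4, 1, 1), ...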
[ 608.436743] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] Traceback (most recent call last): [ 608.436743] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 608.436743] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] self.driver.spawn(context, instance, image_meta, [ 608.436743] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 608.436743] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] self._vmops.spawn(context, instance, image_meta, injected_files, [ 608.436743] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 608.436743] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] vm_ref = self.build_virtual_machine(instance, [ 608.436743] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 608.436743] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] vif_infos = vmwarevif.get_vif_info(self._session, [ 608.436743] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 608.437071] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] for vif in network_info: [ 608.437071] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 608.437071] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] return self._sync_wrapper(fn, *args, **kwargs) [ 608.437071] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 608.437071] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] self.wait() [ 608.437071] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 608.437071] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] self[:] = self._gt.wait() [ 608.437071] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 608.437071] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] return self._exit_event.wait() [ 608.437071] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 608.437071] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] result = hub.switch() [ 608.437071] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
608.437071] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] return self.greenlet.switch() [ 608.437372] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 608.437372] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] result = function(*args, **kwargs) [ 608.437372] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 608.437372] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] return func(*args, **kwargs) [ 608.437372] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 608.437372] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] raise e [ 608.437372] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 608.437372] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] nwinfo = self.network_api.allocate_for_instance( [ 608.437372] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 608.437372] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] created_port_ids = self._update_ports_for_instance( [ 608.437372] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 608.437372] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] with excutils.save_and_reraise_exception(): [ 608.437372] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 608.437655] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] self.force_reraise() [ 608.437655] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 608.437655] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] raise self.value [ 608.437655] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 608.437655] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] updated_port = self._update_port( [ 608.437655] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 608.437655] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] _ensure_no_port_binding_failure(port) [ 608.437655] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 608.437655] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] raise exception.PortBindingFailed(port_id=port['id']) [ 608.437655] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] nova.exception.PortBindingFailed: Binding failed for port 6385cb29-db63-485a-90be-0c7ea8677787, please check neutron logs for more information. [ 608.437655] env[62096]: ERROR nova.compute.manager [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] [ 608.437896] env[62096]: DEBUG nova.compute.utils [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] Binding failed for port 6385cb29-db63-485a-90be-0c7ea8677787, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 608.441051] env[62096]: DEBUG oslo_concurrency.lockutils [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.952s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 608.443556] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Expecting reply to msg 149e8e47b612443f9fba6e4580c7e40c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 608.445591] env[62096]: DEBUG nova.compute.manager [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] Build of instance 09410bd9-aa4e-49a8-86fb-8058b842bd72 was re-scheduled: Binding failed for port 6385cb29-db63-485a-90be-0c7ea8677787, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 608.446051] env[62096]: DEBUG nova.compute.manager [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 608.446284] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Acquiring lock "refresh_cache-09410bd9-aa4e-49a8-86fb-8058b842bd72" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 608.446428] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Acquired lock "refresh_cache-09410bd9-aa4e-49a8-86fb-8058b842bd72" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 608.446586] env[62096]: DEBUG nova.network.neutron [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 608.446966] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Expecting reply to msg a8001bb46011494e87057dea8e837c68 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 608.465428] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a8001bb46011494e87057dea8e837c68 [ 608.479030] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 608.481651] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 149e8e47b612443f9fba6e4580c7e40c [ 608.988176] env[62096]: DEBUG nova.network.neutron [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 609.088903] env[62096]: DEBUG nova.compute.manager [req-1266f38e-da29-4040-9c5e-f6b8e5238ed9 req-47b149a0-964e-4cf9-9f72-fd4aa01fef6c service nova] [instance: 9bc49e7a-af22-4112-8609-348605599692] Received event network-changed-e34eb22a-a84c-4ad4-be42-ea372c508428 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 609.089482] env[62096]: DEBUG nova.compute.manager [req-1266f38e-da29-4040-9c5e-f6b8e5238ed9 req-47b149a0-964e-4cf9-9f72-fd4aa01fef6c service nova] [instance: 9bc49e7a-af22-4112-8609-348605599692] Refreshing instance network info cache due to event network-changed-e34eb22a-a84c-4ad4-be42-ea372c508428. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 609.089482] env[62096]: DEBUG oslo_concurrency.lockutils [req-1266f38e-da29-4040-9c5e-f6b8e5238ed9 req-47b149a0-964e-4cf9-9f72-fd4aa01fef6c service nova] Acquiring lock "refresh_cache-9bc49e7a-af22-4112-8609-348605599692" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 609.089586] env[62096]: DEBUG oslo_concurrency.lockutils [req-1266f38e-da29-4040-9c5e-f6b8e5238ed9 req-47b149a0-964e-4cf9-9f72-fd4aa01fef6c service nova] Acquired lock "refresh_cache-9bc49e7a-af22-4112-8609-348605599692" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.089862] env[62096]: DEBUG nova.network.neutron [req-1266f38e-da29-4040-9c5e-f6b8e5238ed9 req-47b149a0-964e-4cf9-9f72-fd4aa01fef6c service nova] [instance: 9bc49e7a-af22-4112-8609-348605599692] Refreshing network info cache for port e34eb22a-a84c-4ad4-be42-ea372c508428 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 609.090292] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-1266f38e-da29-4040-9c5e-f6b8e5238ed9 req-47b149a0-964e-4cf9-9f72-fd4aa01fef6c service nova] Expecting reply to msg 56a8e350fa02428eb044fa52f01e265e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 609.097339] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 56a8e350fa02428eb044fa52f01e265e [ 609.118114] env[62096]: DEBUG nova.network.neutron [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 609.119167] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Expecting reply to msg 9951995ef5244b38b99c94483c7aed2b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 609.128823] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9951995ef5244b38b99c94483c7aed2b [ 609.470085] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17b485fb-7584-4c4e-be5a-84903708f006 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.478751] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e7655c2-eb0b-47c4-a72a-dda2bcebae25 {{(pid=62096) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.510154] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29dcca2c-9034-476c-902a-e91bf1b540b9 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.522218] env[62096]: ERROR nova.compute.manager [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e34eb22a-a84c-4ad4-be42-ea372c508428, please check neutron logs for more information. [ 609.522218] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 609.522218] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 609.522218] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 609.522218] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 609.522218] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 609.522218] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 609.522218] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 609.522218] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 609.522218] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 609.522218] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 609.522218] env[62096]: ERROR nova.compute.manager raise self.value [ 609.522218] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 609.522218] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 609.522218] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 609.522218] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 609.522658] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 609.522658] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 609.522658] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e34eb22a-a84c-4ad4-be42-ea372c508428, please check neutron logs for more information. 
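The traceback above ends in _ensure_no_port_binding_failure() (nova/network/neutron.py, line 294 in this traceback), which rejects a port whose Neutron binding attempt failed. Below is a minimal standalone sketch of that kind of check, assuming the port dict carries 'binding:vif_type' set to 'binding_failed' as Neutron reports for a port it could not bind; the exception class is a local stand-in for nova.exception.PortBindingFailed, redefined only so the snippet runs on its own and is not Nova source.

    # Sketch only: mirrors the check the traceback bottoms out in; not Nova source.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # value Neutron reports for a failed binding

    class PortBindingFailed(Exception):  # local stand-in for nova.exception.PortBindingFailed
        def __init__(self, port_id):
            super().__init__(f"Binding failed for port {port_id}, "
                             "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port):
        """Raise if Neutron reports that binding this port failed."""
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # A port dict shaped like the one the log reports as failed:
    failed_port = {'id': 'e34eb22a-a84c-4ad4-be42-ea372c508428',
                   'binding:vif_type': VIF_TYPE_BINDING_FAILED}
    try:
        ensure_no_port_binding_failure(failed_port)
    except PortBindingFailed as exc:
        print(exc)

Because the exception is raised inside _update_port(), the compute manager treats the network allocation as failed; depending on the build phase it either terminates the instance or re-schedules the build, and both outcomes appear in the surrounding records.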
[ 609.522658] env[62096]: ERROR nova.compute.manager [ 609.522658] env[62096]: Traceback (most recent call last): [ 609.522658] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 609.522658] env[62096]: listener.cb(fileno) [ 609.522658] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 609.522658] env[62096]: result = function(*args, **kwargs) [ 609.522658] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 609.522658] env[62096]: return func(*args, **kwargs) [ 609.522658] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 609.522658] env[62096]: raise e [ 609.522658] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 609.522658] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 609.522658] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 609.522658] env[62096]: created_port_ids = self._update_ports_for_instance( [ 609.522658] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 609.522658] env[62096]: with excutils.save_and_reraise_exception(): [ 609.522658] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 609.522658] env[62096]: self.force_reraise() [ 609.522658] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 609.522658] env[62096]: raise self.value [ 609.522658] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 609.522658] env[62096]: updated_port = self._update_port( [ 609.522658] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 609.522658] env[62096]: _ensure_no_port_binding_failure(port) [ 609.522658] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 609.522658] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 609.523357] env[62096]: nova.exception.PortBindingFailed: Binding failed for port e34eb22a-a84c-4ad4-be42-ea372c508428, please check neutron logs for more information. [ 609.523357] env[62096]: Removing descriptor: 14 [ 609.523357] env[62096]: ERROR nova.compute.manager [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] [instance: 9bc49e7a-af22-4112-8609-348605599692] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e34eb22a-a84c-4ad4-be42-ea372c508428, please check neutron logs for more information. 
[ 609.523357] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] Traceback (most recent call last): [ 609.523357] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 609.523357] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] yield resources [ 609.523357] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 609.523357] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] self.driver.spawn(context, instance, image_meta, [ 609.523357] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 609.523357] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] self._vmops.spawn(context, instance, image_meta, injected_files, [ 609.523357] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 609.523357] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] vm_ref = self.build_virtual_machine(instance, [ 609.523645] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 609.523645] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] vif_infos = vmwarevif.get_vif_info(self._session, [ 609.523645] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 609.523645] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] for vif in network_info: [ 609.523645] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 609.523645] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] return self._sync_wrapper(fn, *args, **kwargs) [ 609.523645] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 609.523645] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] self.wait() [ 609.523645] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 609.523645] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] self[:] = self._gt.wait() [ 609.523645] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 609.523645] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] return self._exit_event.wait() [ 609.523645] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 609.523972] env[62096]: ERROR 
nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] result = hub.switch() [ 609.523972] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 609.523972] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] return self.greenlet.switch() [ 609.523972] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 609.523972] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] result = function(*args, **kwargs) [ 609.523972] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 609.523972] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] return func(*args, **kwargs) [ 609.523972] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 609.523972] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] raise e [ 609.523972] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 609.523972] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] nwinfo = self.network_api.allocate_for_instance( [ 609.523972] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 609.523972] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] created_port_ids = self._update_ports_for_instance( [ 609.524283] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 609.524283] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] with excutils.save_and_reraise_exception(): [ 609.524283] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 609.524283] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] self.force_reraise() [ 609.524283] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 609.524283] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] raise self.value [ 609.524283] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 609.524283] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] updated_port = self._update_port( [ 609.524283] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 609.524283] 
env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] _ensure_no_port_binding_failure(port) [ 609.524283] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 609.524283] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] raise exception.PortBindingFailed(port_id=port['id']) [ 609.524561] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] nova.exception.PortBindingFailed: Binding failed for port e34eb22a-a84c-4ad4-be42-ea372c508428, please check neutron logs for more information. [ 609.524561] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] [ 609.524561] env[62096]: INFO nova.compute.manager [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] [instance: 9bc49e7a-af22-4112-8609-348605599692] Terminating instance [ 609.525954] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b54e8afd-4877-455f-a2af-cb0ad51d4663 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.539534] env[62096]: DEBUG oslo_concurrency.lockutils [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Acquiring lock "refresh_cache-9bc49e7a-af22-4112-8609-348605599692" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 609.556038] env[62096]: DEBUG nova.compute.provider_tree [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 609.556584] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Expecting reply to msg d2a2f41fa59546948651cfbf846c5f4b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 609.564486] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d2a2f41fa59546948651cfbf846c5f4b [ 609.621711] env[62096]: DEBUG nova.network.neutron [req-1266f38e-da29-4040-9c5e-f6b8e5238ed9 req-47b149a0-964e-4cf9-9f72-fd4aa01fef6c service nova] [instance: 9bc49e7a-af22-4112-8609-348605599692] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 609.623948] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Releasing lock "refresh_cache-09410bd9-aa4e-49a8-86fb-8058b842bd72" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 609.624195] env[62096]: DEBUG nova.compute.manager [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 609.624467] env[62096]: DEBUG nova.compute.manager [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 609.624531] env[62096]: DEBUG nova.network.neutron [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 609.639299] env[62096]: DEBUG nova.network.neutron [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 609.639846] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Expecting reply to msg f892b13f3beb42278fb5d5d3f5451c80 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 609.650030] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f892b13f3beb42278fb5d5d3f5451c80 [ 609.741267] env[62096]: DEBUG nova.network.neutron [req-1266f38e-da29-4040-9c5e-f6b8e5238ed9 req-47b149a0-964e-4cf9-9f72-fd4aa01fef6c service nova] [instance: 9bc49e7a-af22-4112-8609-348605599692] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 609.741879] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-1266f38e-da29-4040-9c5e-f6b8e5238ed9 req-47b149a0-964e-4cf9-9f72-fd4aa01fef6c service nova] Expecting reply to msg 0d8e6a5c069f40eb9dce10b58e5c36f5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 609.754470] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0d8e6a5c069f40eb9dce10b58e5c36f5 [ 610.065339] env[62096]: DEBUG nova.scheduler.client.report [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 610.067810] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Expecting reply to msg 902c4e422c2945e0bd5f166b29fec6c0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 610.089325] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 902c4e422c2945e0bd5f166b29fec6c0 [ 610.147450] env[62096]: DEBUG nova.network.neutron [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 610.148457] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Expecting reply to msg 496defaa24fd4a70b1cd36b3512ca602 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 610.161937] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 496defaa24fd4a70b1cd36b3512ca602 [ 610.244285] env[62096]: DEBUG oslo_concurrency.lockutils [req-1266f38e-da29-4040-9c5e-f6b8e5238ed9 req-47b149a0-964e-4cf9-9f72-fd4aa01fef6c service nova] Releasing lock "refresh_cache-9bc49e7a-af22-4112-8609-348605599692" 
{{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 610.244703] env[62096]: DEBUG oslo_concurrency.lockutils [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Acquired lock "refresh_cache-9bc49e7a-af22-4112-8609-348605599692" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 610.244896] env[62096]: DEBUG nova.network.neutron [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] [instance: 9bc49e7a-af22-4112-8609-348605599692] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 610.245496] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Expecting reply to msg 1d7320c8e3ab48578ca3aef3a2f38b75 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 610.259980] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1d7320c8e3ab48578ca3aef3a2f38b75 [ 610.570060] env[62096]: DEBUG oslo_concurrency.lockutils [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.129s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 610.570731] env[62096]: ERROR nova.compute.manager [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e792c2a8-f6e8-407d-9d05-07ff5ba3feb0, please check neutron logs for more information. 
[ 610.570731] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] Traceback (most recent call last): [ 610.570731] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 610.570731] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] self.driver.spawn(context, instance, image_meta, [ 610.570731] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 610.570731] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] self._vmops.spawn(context, instance, image_meta, injected_files, [ 610.570731] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 610.570731] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] vm_ref = self.build_virtual_machine(instance, [ 610.570731] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 610.570731] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] vif_infos = vmwarevif.get_vif_info(self._session, [ 610.570731] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 610.571126] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] for vif in network_info: [ 610.571126] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 610.571126] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] return self._sync_wrapper(fn, *args, **kwargs) [ 610.571126] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 610.571126] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] self.wait() [ 610.571126] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 610.571126] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] self[:] = self._gt.wait() [ 610.571126] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 610.571126] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] return self._exit_event.wait() [ 610.571126] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 610.571126] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] current.throw(*self._exc) [ 610.571126] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
610.571126] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] result = function(*args, **kwargs) [ 610.571506] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 610.571506] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] return func(*args, **kwargs) [ 610.571506] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 610.571506] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] raise e [ 610.571506] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 610.571506] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] nwinfo = self.network_api.allocate_for_instance( [ 610.571506] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 610.571506] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] created_port_ids = self._update_ports_for_instance( [ 610.571506] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 610.571506] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] with excutils.save_and_reraise_exception(): [ 610.571506] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 610.571506] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] self.force_reraise() [ 610.571506] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 610.571834] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] raise self.value [ 610.571834] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 610.571834] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] updated_port = self._update_port( [ 610.571834] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 610.571834] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] _ensure_no_port_binding_failure(port) [ 610.571834] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 610.571834] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] raise exception.PortBindingFailed(port_id=port['id']) [ 610.571834] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] nova.exception.PortBindingFailed: Binding failed for 
port e792c2a8-f6e8-407d-9d05-07ff5ba3feb0, please check neutron logs for more information. [ 610.571834] env[62096]: ERROR nova.compute.manager [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] [ 610.571834] env[62096]: DEBUG nova.compute.utils [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] Binding failed for port e792c2a8-f6e8-407d-9d05-07ff5ba3feb0, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 610.572724] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.790s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 610.574466] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Expecting reply to msg 8eac4b2af33b4cd1a8f0169270de4005 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 610.575630] env[62096]: DEBUG nova.compute.manager [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] Build of instance 4fca4cf6-1c0e-487e-bf26-fc441d143128 was re-scheduled: Binding failed for port e792c2a8-f6e8-407d-9d05-07ff5ba3feb0, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 610.576124] env[62096]: DEBUG nova.compute.manager [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 610.576313] env[62096]: DEBUG oslo_concurrency.lockutils [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Acquiring lock "refresh_cache-4fca4cf6-1c0e-487e-bf26-fc441d143128" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 610.576450] env[62096]: DEBUG oslo_concurrency.lockutils [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Acquired lock "refresh_cache-4fca4cf6-1c0e-487e-bf26-fc441d143128" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 610.576601] env[62096]: DEBUG nova.network.neutron [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 610.577043] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Expecting reply to msg 06d8a4ef28ce46519289e3d9c99619fb in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 610.584730] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 06d8a4ef28ce46519289e3d9c99619fb [ 610.612317] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8eac4b2af33b4cd1a8f0169270de4005 [ 610.653912] env[62096]: INFO nova.compute.manager [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 09410bd9-aa4e-49a8-86fb-8058b842bd72] Took 1.03 seconds to deallocate network for instance. [ 610.653912] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Expecting reply to msg a042f9da1d2d4430aa52edc4b7975348 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 610.698333] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a042f9da1d2d4430aa52edc4b7975348 [ 610.793964] env[62096]: DEBUG nova.network.neutron [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] [instance: 9bc49e7a-af22-4112-8609-348605599692] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 611.078668] env[62096]: DEBUG nova.network.neutron [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] [instance: 9bc49e7a-af22-4112-8609-348605599692] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 611.079247] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Expecting reply to msg f04365e84235493b892ea6136ad35e6c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 611.097704] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f04365e84235493b892ea6136ad35e6c [ 611.120875] env[62096]: DEBUG nova.network.neutron [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 611.158519] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Expecting reply to msg 6137d7b0f1ef45e986986b8affbb4dc1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 611.202640] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6137d7b0f1ef45e986986b8affbb4dc1 [ 611.252390] env[62096]: DEBUG nova.network.neutron [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 611.252916] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Expecting reply to msg 22ad5b4cdee2440c83e79b2bbc3363e8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 611.264907] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 22ad5b4cdee2440c83e79b2bbc3363e8 [ 611.380828] env[62096]: DEBUG nova.compute.manager [req-020ae91b-4679-4bf8-8b10-2306944feb03 req-26461977-dda7-4393-aabe-4f9a6ac291b4 service nova] [instance: 9bc49e7a-af22-4112-8609-348605599692] Received event network-vif-deleted-e34eb22a-a84c-4ad4-be42-ea372c508428 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 611.587182] env[62096]: DEBUG oslo_concurrency.lockutils [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Releasing lock "refresh_cache-9bc49e7a-af22-4112-8609-348605599692" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 611.587583] env[62096]: DEBUG nova.compute.manager [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] [instance: 
9bc49e7a-af22-4112-8609-348605599692] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 611.587768] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] [instance: 9bc49e7a-af22-4112-8609-348605599692] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 611.588166] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b37ef58a-8a04-4ebd-89c0-90f51943a80b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.601630] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d24ed6a-3b23-41e9-99d7-773bc429a685 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.617353] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ac26b59-d1f8-4a18-b5c0-47a4223919e3 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.625993] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58cbb480-dbdc-417d-95e7-51ef6bcae630 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.642780] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] [instance: 9bc49e7a-af22-4112-8609-348605599692] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9bc49e7a-af22-4112-8609-348605599692 could not be found. [ 611.643017] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] [instance: 9bc49e7a-af22-4112-8609-348605599692] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 611.643208] env[62096]: INFO nova.compute.manager [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] [instance: 9bc49e7a-af22-4112-8609-348605599692] Took 0.06 seconds to destroy the instance on the hypervisor. [ 611.643464] env[62096]: DEBUG oslo.service.loopingcall [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 611.644411] env[62096]: DEBUG nova.compute.manager [-] [instance: 9bc49e7a-af22-4112-8609-348605599692] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 611.644411] env[62096]: DEBUG nova.network.neutron [-] [instance: 9bc49e7a-af22-4112-8609-348605599692] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 611.677976] env[62096]: DEBUG nova.network.neutron [-] [instance: 9bc49e7a-af22-4112-8609-348605599692] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 611.678440] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e5f8a2e0d51c4e4aacc5c345a9c926ca in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 611.679989] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1a70b95-fb69-4eef-90e7-dea54e60a1ab {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.686219] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e5f8a2e0d51c4e4aacc5c345a9c926ca [ 611.689802] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9959a89a-d6b9-4639-b2a2-8972d5abbcd8 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.697905] env[62096]: INFO nova.scheduler.client.report [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Deleted allocations for instance 09410bd9-aa4e-49a8-86fb-8058b842bd72 [ 611.705202] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Expecting reply to msg ff4f6d03f1644a0ab1745e75f820c65d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 611.714313] env[62096]: DEBUG nova.compute.provider_tree [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 611.714789] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Expecting reply to msg 7de18c0c07f44e33887ee226e0281d53 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 611.723281] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ff4f6d03f1644a0ab1745e75f820c65d [ 611.730445] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7de18c0c07f44e33887ee226e0281d53 [ 611.755654] env[62096]: DEBUG oslo_concurrency.lockutils [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Releasing lock "refresh_cache-4fca4cf6-1c0e-487e-bf26-fc441d143128" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 
611.755927] env[62096]: DEBUG nova.compute.manager [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 611.756133] env[62096]: DEBUG nova.compute.manager [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 611.756301] env[62096]: DEBUG nova.network.neutron [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 611.775175] env[62096]: DEBUG nova.network.neutron [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 611.775895] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Expecting reply to msg 240be81338ca4ae5b07c84fe5a9d04f2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 611.782281] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 240be81338ca4ae5b07c84fe5a9d04f2 [ 612.153489] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 940f13601315406cbca41a01e35987d2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 612.162680] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 940f13601315406cbca41a01e35987d2 [ 612.182966] env[62096]: DEBUG nova.network.neutron [-] [instance: 9bc49e7a-af22-4112-8609-348605599692] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 612.183386] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg ed2409e02e214b8196a95cff49ca5a94 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 612.191701] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ed2409e02e214b8196a95cff49ca5a94 [ 612.217252] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c1291365-a5cc-4752-acce-920ad54dbd71 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Lock "09410bd9-aa4e-49a8-86fb-8058b842bd72" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.594s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 612.218048] env[62096]: DEBUG nova.scheduler.client.report [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Inventory has not changed for 
provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 612.220760] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Expecting reply to msg 16203c49c396416fb9da3f504db3108f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 612.222584] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg 62e9979ab59144ae83b17270ac69cc31 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 612.232566] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 16203c49c396416fb9da3f504db3108f [ 612.234133] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 62e9979ab59144ae83b17270ac69cc31 [ 612.283068] env[62096]: DEBUG nova.network.neutron [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 612.283591] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Expecting reply to msg 47dffeebf39e47c28d660356d1274802 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 612.292606] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 47dffeebf39e47c28d660356d1274802 [ 612.686120] env[62096]: INFO nova.compute.manager [-] [instance: 9bc49e7a-af22-4112-8609-348605599692] Took 1.04 seconds to deallocate network for instance. 
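
Note on the inventory report above: the figures Nova sends to placement ('total', 'reserved', 'allocation_ratio', 'max_unit' per resource class) determine how much the scheduler may pack onto provider 6eefe13c-ab55-4c03-987f-47a62756c3b3; effective capacity is (total - reserved) * allocation_ratio, while max_unit caps a single allocation. A minimal sketch of that arithmetic, using the numbers copied from the log entry above:

```python
# Sketch: translate the reported placement inventory into schedulable capacity.
# Formula: capacity = (total - reserved) * allocation_ratio; max_unit limits
# what one instance may consume. Values are taken from the log entry above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 126},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: schedulable={capacity:.0f}, per-instance cap={inv['max_unit']}")
```
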
[ 612.688549] env[62096]: DEBUG nova.compute.claims [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] [instance: 9bc49e7a-af22-4112-8609-348605599692] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 612.688735] env[62096]: DEBUG oslo_concurrency.lockutils [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 612.723731] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.151s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 612.724410] env[62096]: ERROR nova.compute.manager [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 2371b750-f7b3-4954-904e-f8f99fe6cc78, please check neutron logs for more information. [ 612.724410] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] Traceback (most recent call last): [ 612.724410] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 612.724410] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] self.driver.spawn(context, instance, image_meta, [ 612.724410] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 612.724410] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] self._vmops.spawn(context, instance, image_meta, injected_files, [ 612.724410] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 612.724410] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] vm_ref = self.build_virtual_machine(instance, [ 612.724410] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 612.724410] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] vif_infos = vmwarevif.get_vif_info(self._session, [ 612.724410] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 612.724745] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] for vif in network_info: [ 612.724745] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 
612.724745] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] return self._sync_wrapper(fn, *args, **kwargs) [ 612.724745] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 612.724745] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] self.wait() [ 612.724745] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 612.724745] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] self[:] = self._gt.wait() [ 612.724745] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 612.724745] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] return self._exit_event.wait() [ 612.724745] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 612.724745] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] result = hub.switch() [ 612.724745] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 612.724745] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] return self.greenlet.switch() [ 612.725216] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 612.725216] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] result = function(*args, **kwargs) [ 612.725216] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 612.725216] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] return func(*args, **kwargs) [ 612.725216] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 612.725216] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] raise e [ 612.725216] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 612.725216] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] nwinfo = self.network_api.allocate_for_instance( [ 612.725216] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 612.725216] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] created_port_ids = self._update_ports_for_instance( [ 612.725216] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 612.725216] env[62096]: 
ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] with excutils.save_and_reraise_exception(): [ 612.725216] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 612.725558] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] self.force_reraise() [ 612.725558] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 612.725558] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] raise self.value [ 612.725558] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 612.725558] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] updated_port = self._update_port( [ 612.725558] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 612.725558] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] _ensure_no_port_binding_failure(port) [ 612.725558] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 612.725558] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] raise exception.PortBindingFailed(port_id=port['id']) [ 612.725558] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] nova.exception.PortBindingFailed: Binding failed for port 2371b750-f7b3-4954-904e-f8f99fe6cc78, please check neutron logs for more information. [ 612.725558] env[62096]: ERROR nova.compute.manager [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] [ 612.725862] env[62096]: DEBUG nova.compute.utils [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] Binding failed for port 2371b750-f7b3-4954-904e-f8f99fe6cc78, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 612.726298] env[62096]: DEBUG nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: 27d12301-a049-4d1e-b171-a09a642703fb] Starting instance... 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 612.727857] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg 8ce413d458094b12bdddf98dab03ef16 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 612.729471] env[62096]: DEBUG oslo_concurrency.lockutils [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.622s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 612.730918] env[62096]: INFO nova.compute.claims [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 612.732412] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Expecting reply to msg 1aca6aceef6b401c85b741ad58a408ed in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 612.734141] env[62096]: DEBUG nova.compute.manager [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] Build of instance 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317 was re-scheduled: Binding failed for port 2371b750-f7b3-4954-904e-f8f99fe6cc78, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 612.734243] env[62096]: DEBUG nova.compute.manager [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 612.734471] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Acquiring lock "refresh_cache-4fdcbe73-6d7e-4f89-b83d-e6b1664bd317" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 612.734614] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Acquired lock "refresh_cache-4fdcbe73-6d7e-4f89-b83d-e6b1664bd317" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 612.734765] env[62096]: DEBUG nova.network.neutron [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 612.735114] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Expecting reply to msg f517ad7ff2964974ad1c1c39e87031fb in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 612.752038] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f517ad7ff2964974ad1c1c39e87031fb [ 612.771212] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1aca6aceef6b401c85b741ad58a408ed [ 612.780632] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8ce413d458094b12bdddf98dab03ef16 [ 612.786013] env[62096]: INFO nova.compute.manager [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] [instance: 4fca4cf6-1c0e-487e-bf26-fc441d143128] Took 1.03 seconds to deallocate network for instance. 
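
Note on the PortBindingFailed tracebacks in this section: they all terminate in _ensure_no_port_binding_failure in nova/network/neutron.py, which inspects the binding:vif_type Neutron returned for the port and aborts the build when the binding failed (which is why the log asks to check the neutron logs). A simplified sketch of that check, with a stand-in exception class for illustration:

```python
# Simplified sketch of the check behind the "Binding failed for port ..." errors:
# after updating a port, Nova looks at the binding:vif_type reported by Neutron
# and raises PortBindingFailed if the binding did not succeed.
VIF_TYPE_BINDING_FAILED = 'binding_failed'  # as in nova.network.model

class PortBindingFailed(Exception):  # stand-in for nova.exception.PortBindingFailed
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")

def _ensure_no_port_binding_failure(port):
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])
```
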
[ 612.788109] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Expecting reply to msg 9b57ac4ddc624adca9c294bced1143d7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 612.825743] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9b57ac4ddc624adca9c294bced1143d7 [ 613.236427] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Expecting reply to msg 39869c338b8e412d91c05db680eb8bce in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 613.245796] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 39869c338b8e412d91c05db680eb8bce [ 613.251645] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 613.256063] env[62096]: DEBUG nova.network.neutron [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 613.293509] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Expecting reply to msg 67db42eb7d38472fb96ced50710630e4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 613.333674] env[62096]: DEBUG nova.network.neutron [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 613.334406] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Expecting reply to msg 1eeb38ebf545499aa8172b774a1af307 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 613.336304] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 67db42eb7d38472fb96ced50710630e4 [ 613.341908] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1eeb38ebf545499aa8172b774a1af307 [ 613.821639] env[62096]: INFO nova.scheduler.client.report [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Deleted allocations for instance 4fca4cf6-1c0e-487e-bf26-fc441d143128 [ 613.834779] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Expecting reply to msg 7957d5a4b21d474bb6c725ec13e5e178 in queue 
reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 613.836434] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Releasing lock "refresh_cache-4fdcbe73-6d7e-4f89-b83d-e6b1664bd317" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 613.839561] env[62096]: DEBUG nova.compute.manager [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 613.839561] env[62096]: DEBUG nova.compute.manager [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 613.839561] env[62096]: DEBUG nova.network.neutron [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 613.858886] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7957d5a4b21d474bb6c725ec13e5e178 [ 613.871065] env[62096]: DEBUG nova.network.neutron [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 613.871683] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Expecting reply to msg cb54ae8ddcf4470e9718375b33a3e9da in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 613.882434] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cb54ae8ddcf4470e9718375b33a3e9da [ 614.196814] env[62096]: DEBUG oslo_concurrency.lockutils [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Acquiring lock "59b37648-7b23-4ae3-90e6-867fbbde25df" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 614.196814] env[62096]: DEBUG oslo_concurrency.lockutils [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Lock "59b37648-7b23-4ae3-90e6-867fbbde25df" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 614.254748] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4de6d573-3460-460f-9e68-222edd52e646 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.261420] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93ce9999-45d5-4245-824f-49d830975db6 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.300455] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31c92872-6319-4e80-9140-399382661427 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.310228] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b83e628-e338-48c2-9d49-1c852c32d11b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.324784] env[62096]: DEBUG nova.compute.provider_tree [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 614.325457] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Expecting reply to msg 36fc53c9de424be7ab5a210401780c1f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 614.332433] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 36fc53c9de424be7ab5a210401780c1f [ 614.336487] env[62096]: DEBUG oslo_concurrency.lockutils [None req-19c9aad9-d898-4c0f-b588-f60abf561c62 
tempest-ServersTestBootFromVolume-2123063213 tempest-ServersTestBootFromVolume-2123063213-project-member] Lock "4fca4cf6-1c0e-487e-bf26-fc441d143128" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.672s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 614.337230] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg 281328c750bc45ab9d1b295429dd26e6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 614.348795] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 281328c750bc45ab9d1b295429dd26e6 [ 614.377898] env[62096]: DEBUG nova.network.neutron [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 614.377898] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Expecting reply to msg 7c6636e65ebb4b939d9124c2225ae108 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 614.384721] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c6636e65ebb4b939d9124c2225ae108 [ 614.827698] env[62096]: DEBUG nova.scheduler.client.report [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 614.830209] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Expecting reply to msg 90bb67e016c5477b87ff92aa8fc7e80f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 614.839004] env[62096]: DEBUG nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] Starting instance... 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 614.840651] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg 52fe81d6278742eb98dfd0a49b136eb9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 614.842939] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 90bb67e016c5477b87ff92aa8fc7e80f [ 614.878752] env[62096]: INFO nova.compute.manager [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] [instance: 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317] Took 1.04 seconds to deallocate network for instance. [ 614.880426] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Expecting reply to msg cf17c5f6086e430b83f9cad7cefec0ca in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 614.884383] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 52fe81d6278742eb98dfd0a49b136eb9 [ 614.922216] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cf17c5f6086e430b83f9cad7cefec0ca [ 615.333169] env[62096]: DEBUG oslo_concurrency.lockutils [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.604s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 615.333644] env[62096]: DEBUG nova.compute.manager [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 615.335327] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Expecting reply to msg 41628ba95fe94db48e2d7dd8a26924d0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 615.345233] env[62096]: DEBUG oslo_concurrency.lockutils [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.354s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 615.347604] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Expecting reply to msg 966cec586fe3413f96965f40a9321951 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 615.366453] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 615.385338] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Expecting reply to msg 82dc35ac6be647c1b38834150ad2b46d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 615.397490] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 966cec586fe3413f96965f40a9321951 [ 615.400838] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 41628ba95fe94db48e2d7dd8a26924d0 [ 615.428923] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 82dc35ac6be647c1b38834150ad2b46d [ 615.849181] env[62096]: DEBUG nova.compute.utils [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 615.849181] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Expecting reply to msg 9f7707d7469847eeba315615308482f0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 615.849181] env[62096]: DEBUG nova.compute.manager [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Not allocating networking since 'none' was specified. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 615.857755] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f7707d7469847eeba315615308482f0 [ 615.917820] env[62096]: INFO nova.scheduler.client.report [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Deleted allocations for instance 4fdcbe73-6d7e-4f89-b83d-e6b1664bd317 [ 615.936035] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Expecting reply to msg 6494cc5b2efa45bfafe86fc1430ff597 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 615.948315] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6494cc5b2efa45bfafe86fc1430ff597 [ 616.324721] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70b00bef-836c-4646-8a29-4ce60e74fbfa {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.333071] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffd2c4f5-b128-4388-a54f-a0a5577b97e0 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.362996] env[62096]: DEBUG nova.compute.manager [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Start building block device mappings for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 616.364714] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Expecting reply to msg c688f647bcd4417fa1c6f26cdfa1bbf4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 616.366376] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3594d0f5-f77a-41e2-a4d9-efe4a013061b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.382440] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17070d76-f89f-41b4-8624-c67328d127c7 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.398976] env[62096]: DEBUG nova.compute.provider_tree [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 616.399654] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Expecting reply to msg 84788f798ce34e8091f8d61528113af7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 616.400882] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c688f647bcd4417fa1c6f26cdfa1bbf4 [ 616.408534] 
env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 84788f798ce34e8091f8d61528113af7 [ 616.436643] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c02b4028-73ad-45a0-8da0-06fbbeeeef87 tempest-ServersAdminTestJSON-915424119 tempest-ServersAdminTestJSON-915424119-project-member] Lock "4fdcbe73-6d7e-4f89-b83d-e6b1664bd317" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.339s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 616.437715] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg 8854648b72084c29b258e22214222ab0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 616.449347] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8854648b72084c29b258e22214222ab0 [ 616.879397] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Expecting reply to msg 63af6917132140d593bdaa9e7ceff471 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 616.902833] env[62096]: DEBUG nova.scheduler.client.report [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 616.911148] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Expecting reply to msg 9f0416b1303f4faf94ebf0f23ef667fe in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 616.917472] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 63af6917132140d593bdaa9e7ceff471 [ 616.921942] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f0416b1303f4faf94ebf0f23ef667fe [ 616.940948] env[62096]: DEBUG nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: bbed3eed-f511-4b9e-9632-74841df01592] Starting instance... 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 616.942528] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg 2de5649751f848b1893720a974a23d8b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 616.980999] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2de5649751f848b1893720a974a23d8b [ 617.382526] env[62096]: DEBUG nova.compute.manager [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Start spawning the instance on the hypervisor. {{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 617.408593] env[62096]: DEBUG nova.virt.hardware [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 617.408820] env[62096]: DEBUG nova.virt.hardware [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 617.408981] env[62096]: DEBUG nova.virt.hardware [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 617.409154] env[62096]: DEBUG nova.virt.hardware [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 617.409291] env[62096]: DEBUG nova.virt.hardware [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 617.409447] env[62096]: DEBUG nova.virt.hardware [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 617.409651] env[62096]: DEBUG nova.virt.hardware [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 617.409801] env[62096]: DEBUG nova.virt.hardware [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 617.409960] env[62096]: DEBUG nova.virt.hardware [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 617.410133] env[62096]: DEBUG nova.virt.hardware [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 617.410341] env[62096]: DEBUG nova.virt.hardware [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 617.411066] env[62096]: DEBUG oslo_concurrency.lockutils [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.066s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 617.411656] env[62096]: ERROR nova.compute.manager [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 23cb598f-2b29-4f1e-ba72-c92aa3797f30, please check neutron logs for more information. 
[ 617.411656] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] Traceback (most recent call last): [ 617.411656] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 617.411656] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] self.driver.spawn(context, instance, image_meta, [ 617.411656] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 617.411656] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] self._vmops.spawn(context, instance, image_meta, injected_files, [ 617.411656] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 617.411656] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] vm_ref = self.build_virtual_machine(instance, [ 617.411656] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 617.411656] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] vif_infos = vmwarevif.get_vif_info(self._session, [ 617.411656] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 617.412077] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] for vif in network_info: [ 617.412077] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 617.412077] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] return self._sync_wrapper(fn, *args, **kwargs) [ 617.412077] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 617.412077] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] self.wait() [ 617.412077] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 617.412077] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] self[:] = self._gt.wait() [ 617.412077] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 617.412077] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] return self._exit_event.wait() [ 617.412077] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 617.412077] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] result = hub.switch() [ 617.412077] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
617.412077] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] return self.greenlet.switch() [ 617.412402] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 617.412402] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] result = function(*args, **kwargs) [ 617.412402] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 617.412402] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] return func(*args, **kwargs) [ 617.412402] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 617.412402] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] raise e [ 617.412402] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 617.412402] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] nwinfo = self.network_api.allocate_for_instance( [ 617.412402] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 617.412402] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] created_port_ids = self._update_ports_for_instance( [ 617.412402] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 617.412402] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] with excutils.save_and_reraise_exception(): [ 617.412402] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 617.412737] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] self.force_reraise() [ 617.412737] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 617.412737] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] raise self.value [ 617.412737] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 617.412737] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] updated_port = self._update_port( [ 617.412737] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 617.412737] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] _ensure_no_port_binding_failure(port) [ 617.412737] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 617.412737] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] raise exception.PortBindingFailed(port_id=port['id']) [ 617.412737] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] nova.exception.PortBindingFailed: Binding failed for port 23cb598f-2b29-4f1e-ba72-c92aa3797f30, please check neutron logs for more information. [ 617.412737] env[62096]: ERROR nova.compute.manager [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] [ 617.413047] env[62096]: DEBUG nova.compute.utils [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] Binding failed for port 23cb598f-2b29-4f1e-ba72-c92aa3797f30, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 617.414100] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-febaa38e-fa9e-401a-a984-b125cd3b2781 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.416972] env[62096]: DEBUG nova.compute.manager [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] Build of instance 6ac3df26-83ab-4519-a1e2-51286c1d1991 was re-scheduled: Binding failed for port 23cb598f-2b29-4f1e-ba72-c92aa3797f30, please check neutron logs for more information. {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 617.417427] env[62096]: DEBUG nova.compute.manager [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 617.417609] env[62096]: DEBUG oslo_concurrency.lockutils [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Acquiring lock "refresh_cache-6ac3df26-83ab-4519-a1e2-51286c1d1991" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 617.417748] env[62096]: DEBUG oslo_concurrency.lockutils [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Acquired lock "refresh_cache-6ac3df26-83ab-4519-a1e2-51286c1d1991" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.417897] env[62096]: DEBUG nova.network.neutron [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 617.418297] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Expecting reply to msg 1be8dd83cdf9496dbf9c29201d345c8d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 
617.419053] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.737s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 617.421636] env[62096]: INFO nova.compute.claims [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 617.426524] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 4056cec27d774923899e75a220f41ec6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 617.431989] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1be8dd83cdf9496dbf9c29201d345c8d [ 617.443063] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9810c3e3-e3c6-435e-9749-70cc9ef12d1c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.471367] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Instance VIF info [] {{(pid=62096) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 617.483809] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=62096) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 617.483809] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 617.483809] env[62096]: DEBUG nova.network.neutron [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 617.485399] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4056cec27d774923899e75a220f41ec6 [ 617.485797] env[62096]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e54ba0fb-36fa-4aff-a14f-96436b611aed {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.501957] env[62096]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. 
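The suds WARNING that closes the entry above appears benign in this run: vSphere returns the SOAP fault inside an HTTP 200 response, oslo.vmware then extracts the fault list ([DuplicateName], shown in the DEBUG entry that follows), and Nova's folder-creation helper treats DuplicateName as "the folder already exists" and reuses it, which is why the build continues with "Folder already exists: OpenStack" and moves on to the Project and Instances folders. A minimal sketch of that idempotent-create pattern, in plain Python with hypothetical names rather than the real vSphere/oslo.vmware API:

    # Minimal sketch (hypothetical names) of the idempotent "create folder"
    # pattern visible in the log: vCenter rejects a repeated CreateFolder
    # call with a DuplicateName fault, and the caller treats that fault as
    # "the folder is already there" instead of an error.

    class DuplicateNameFault(Exception):
        """Stand-in for the DuplicateName SOAP fault reported by vCenter."""


    class FakeFolderApi:
        """Toy in-memory replacement for the real vCenter Folder API."""

        def __init__(self):
            self._children = {}

        def create_folder(self, parent, name):
            key = (parent, name)
            if key in self._children:
                raise DuplicateNameFault(name)
            self._children[key] = object()
            return self._children[key]

        def find_folder(self, parent, name):
            return self._children[(parent, name)]


    def ensure_folder(api, parent, name):
        """Create the folder if missing, otherwise reuse the existing one."""
        try:
            return api.create_folder(parent, name)
        except DuplicateNameFault:
            # Same outcome the log records as "Folder already exists: OpenStack."
            return api.find_folder(parent, name)


    if __name__ == "__main__":
        api = FakeFolderApi()
        first = ensure_folder(api, "group-v4", "OpenStack")
        second = ensure_folder(api, "group-v4", "OpenStack")
        assert first is second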
[ 617.502144] env[62096]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62096) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 617.502501] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Folder already exists: OpenStack. Parent ref: group-v4. {{(pid=62096) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 617.502707] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Creating folder: Project (31f68b01926c4649a8a8f38e0eed1a46). Parent ref: group-v107847. {{(pid=62096) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 617.503195] env[62096]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-db4537a4-d83f-42dd-97c4-ab84e2af92c4 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.517309] env[62096]: INFO nova.virt.vmwareapi.vm_util [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Created folder: Project (31f68b01926c4649a8a8f38e0eed1a46) in parent group-v107847. [ 617.517553] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Creating folder: Instances. Parent ref: group-v107856. {{(pid=62096) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 617.517812] env[62096]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b39a2987-a081-41a8-9104-afbd52bd3c88 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.528532] env[62096]: INFO nova.virt.vmwareapi.vm_util [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Created folder: Instances in parent group-v107856. [ 617.528532] env[62096]: DEBUG oslo.service.loopingcall [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 617.528532] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Creating VM on the ESX host {{(pid=62096) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 617.528532] env[62096]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-73db2cf3-820d-4115-8d3a-c363cb96ab91 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.548220] env[62096]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 617.548220] env[62096]: value = "task-397366" [ 617.548220] env[62096]: _type = "Task" [ 617.548220] env[62096]: } to complete. 
{{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.549037] env[62096]: DEBUG nova.network.neutron [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 617.549525] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Expecting reply to msg 62e9510d8c2646d5bc5acba662fd2579 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 617.562371] env[62096]: DEBUG oslo_vmware.api [-] Task: {'id': task-397366, 'name': CreateVM_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.562935] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 62e9510d8c2646d5bc5acba662fd2579 [ 617.937882] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg cfd9af66cc2149a98ecb5cb7c00a9d53 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 617.947010] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cfd9af66cc2149a98ecb5cb7c00a9d53 [ 618.056135] env[62096]: DEBUG oslo_concurrency.lockutils [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Releasing lock "refresh_cache-6ac3df26-83ab-4519-a1e2-51286c1d1991" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 618.056135] env[62096]: DEBUG nova.compute.manager [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 618.056135] env[62096]: DEBUG nova.compute.manager [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 618.056135] env[62096]: DEBUG nova.network.neutron [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 618.063611] env[62096]: DEBUG oslo_vmware.api [-] Task: {'id': task-397366, 'name': CreateVM_Task, 'duration_secs': 0.299158} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.063762] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Created VM on the ESX host {{(pid=62096) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 618.064767] env[62096]: DEBUG oslo_vmware.service [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf5b95e4-8624-492e-a6aa-8fc3f2f8a2b5 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.071580] env[62096]: DEBUG nova.network.neutron [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 618.072508] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Expecting reply to msg a7ac0923dcbc4c2b91ff6220317fcfd2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 618.077604] env[62096]: DEBUG oslo_concurrency.lockutils [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 618.077762] env[62096]: DEBUG oslo_concurrency.lockutils [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Acquired lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.078559] env[62096]: DEBUG oslo_concurrency.lockutils [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 618.079111] env[62096]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-82b7a714-24a5-4670-8e78-16ef9ab07898 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.081338] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a7ac0923dcbc4c2b91ff6220317fcfd2 [ 618.085235] env[62096]: DEBUG oslo_vmware.api [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Waiting for the task: (returnval){ [ 618.085235] env[62096]: value = "session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]5298afdc-d999-bece-6c00-2d9346962d95" [ 618.085235] env[62096]: _type = "Task" [ 618.085235] env[62096]: } to complete. 
{{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.096911] env[62096]: DEBUG oslo_vmware.api [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]5298afdc-d999-bece-6c00-2d9346962d95, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.239596] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Acquiring lock "0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 618.239596] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Lock "0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 618.574364] env[62096]: DEBUG nova.network.neutron [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 618.574887] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Expecting reply to msg 3974340ec7134589bea170a5e610c513 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 618.583800] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3974340ec7134589bea170a5e610c513 [ 618.599460] env[62096]: DEBUG oslo_concurrency.lockutils [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Releasing lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 618.599989] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Processing image fb764baa-3805-45c4-a694-aa91b0932110 {{(pid=62096) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 618.600296] env[62096]: DEBUG oslo_concurrency.lockutils [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk" {{(pid=62096) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 618.600480] env[62096]: DEBUG oslo_concurrency.lockutils [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Acquired lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.600930] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62096) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 618.601219] env[62096]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ea40de85-35a9-4260-a5df-7c5607112bad {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.610888] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62096) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 618.611144] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62096) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 618.612021] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1558f1a6-c6a1-4220-bc54-acd1238a9ac0 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.622636] env[62096]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49b45d73-cf64-40f9-9fe7-3e5a6fe06737 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.629173] env[62096]: DEBUG oslo_vmware.api [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Waiting for the task: (returnval){ [ 618.629173] env[62096]: value = "session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]529557f4-b0f9-f071-a53b-93a3a6a0fcf3" [ 618.629173] env[62096]: _type = "Task" [ 618.629173] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.638483] env[62096]: DEBUG oslo_vmware.api [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]529557f4-b0f9-f071-a53b-93a3a6a0fcf3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.857459] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cf4c208-49c5-4fdd-9526-de74ced25767 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.865637] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83649e9d-932e-4b55-a30a-01171136b32d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.896346] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-262059e3-d3ee-4e99-a32c-da7c79ff465b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.904947] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3964e98-9832-4546-b80d-9b6ada3ca338 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.921863] env[62096]: DEBUG nova.compute.provider_tree [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 618.922538] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 664671eb0bfc4350b9da33cac35ac632 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 618.930409] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 664671eb0bfc4350b9da33cac35ac632 [ 619.078501] env[62096]: INFO nova.compute.manager [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 6ac3df26-83ab-4519-a1e2-51286c1d1991] Took 1.02 seconds to deallocate network for instance. 
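The CreateVM_Task and SearchDatastore_Task entries above ("Waiting for the task ... to complete", "progress is 0%", "completed successfully" with a duration_secs field) are produced by oslo.vmware's wait_for_task, which polls the vCenter task at a fixed interval until it reaches a terminal state. A stripped-down sketch of that polling pattern, in plain Python with hypothetical names (not the actual oslo.vmware implementation, which additionally reports progress and richer per-fault error details):

    import time


    class TaskFailed(Exception):
        pass


    def wait_for_task(poll_fn, interval=0.5, timeout=60.0):
        """Poll `poll_fn` until the task reports a terminal state.

        `poll_fn` is assumed to return a dict such as
        {"state": "running", "progress": 40} or {"state": "success"}.
        """
        deadline = time.monotonic() + timeout
        while True:
            info = poll_fn()
            state = info.get("state")
            if state == "success":
                return info
            if state == "error":
                raise TaskFailed(info.get("error", "task failed"))
            if time.monotonic() > deadline:
                raise TaskFailed("timed out waiting for task")
            # The real driver logs lines like "progress is 0%" on each poll.
            time.sleep(interval)


    if __name__ == "__main__":
        states = iter([
            {"state": "running", "progress": 0},
            {"state": "running", "progress": 100},
            {"state": "success", "duration_secs": 0.3},
        ])
        print(wait_for_task(lambda: next(states), interval=0.01))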
[ 619.078758] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Expecting reply to msg a4eb2237c51944f8b49c2cd8a09f6712 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 619.120074] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a4eb2237c51944f8b49c2cd8a09f6712 [ 619.142223] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Preparing fetch location {{(pid=62096) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 619.142223] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Creating directory with path [datastore2] vmware_temp/379ba144-d3b4-46de-a79e-364ed287ce5a/fb764baa-3805-45c4-a694-aa91b0932110 {{(pid=62096) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 619.142430] env[62096]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0e08defc-83d3-4024-8f70-b1d94a3e590f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.169154] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Created directory with path [datastore2] vmware_temp/379ba144-d3b4-46de-a79e-364ed287ce5a/fb764baa-3805-45c4-a694-aa91b0932110 {{(pid=62096) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 619.169637] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Fetch image to [datastore2] vmware_temp/379ba144-d3b4-46de-a79e-364ed287ce5a/fb764baa-3805-45c4-a694-aa91b0932110/tmp-sparse.vmdk {{(pid=62096) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 619.169882] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Downloading image file data fb764baa-3805-45c4-a694-aa91b0932110 to [datastore2] vmware_temp/379ba144-d3b4-46de-a79e-364ed287ce5a/fb764baa-3805-45c4-a694-aa91b0932110/tmp-sparse.vmdk on the data store datastore2 {{(pid=62096) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 619.170717] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0c2afed-4bf5-4917-867c-6155d0c37b75 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.182006] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d5fe8ce-885e-4187-9384-0c8902cfd373 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.205490] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-c22c3ce0-0850-4791-bcaa-35580ccb884d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.240897] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37eebce4-5c24-428b-bfc2-7c9604ae1380 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.255995] env[62096]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7ed7b29d-4544-43be-bf5d-2457212904d9 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.354164] env[62096]: DEBUG nova.virt.vmwareapi.images [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Downloading image file data fb764baa-3805-45c4-a694-aa91b0932110 to the data store datastore2 {{(pid=62096) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 619.425141] env[62096]: DEBUG nova.scheduler.client.report [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 619.427597] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 38ef8fbcd22640f1a4cf3115556d7ea4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 619.429214] env[62096]: DEBUG oslo_vmware.rw_handles [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/379ba144-d3b4-46de-a79e-364ed287ce5a/fb764baa-3805-45c4-a694-aa91b0932110/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62096) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 619.496510] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 38ef8fbcd22640f1a4cf3115556d7ea4 [ 619.584268] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Expecting reply to msg d0f2cf388ce347da8fddbde5740944e3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 619.625542] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d0f2cf388ce347da8fddbde5740944e3 [ 619.936707] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.517s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 619.937243] env[62096]: DEBUG nova.compute.manager [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] Start building networks asynchronously for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 619.939119] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg c2cae9ef3a6b49d8bf380db537145ae0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 619.941037] env[62096]: DEBUG oslo_concurrency.lockutils [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.554s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 619.943355] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Expecting reply to msg 982e0e2a8e534e70979605f94d43b2f5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 620.000876] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c2cae9ef3a6b49d8bf380db537145ae0 [ 620.005709] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 982e0e2a8e534e70979605f94d43b2f5 [ 620.119220] env[62096]: INFO nova.scheduler.client.report [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Deleted allocations for instance 6ac3df26-83ab-4519-a1e2-51286c1d1991 [ 620.127357] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Expecting reply to msg 7b23988761cf424a867d660875739175 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 620.147993] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7b23988761cf424a867d660875739175 [ 620.152357] env[62096]: DEBUG 
oslo_vmware.rw_handles [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Completed reading data from the image iterator. {{(pid=62096) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 620.152577] env[62096]: DEBUG oslo_vmware.rw_handles [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/379ba144-d3b4-46de-a79e-364ed287ce5a/fb764baa-3805-45c4-a694-aa91b0932110/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62096) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 620.227853] env[62096]: DEBUG nova.virt.vmwareapi.images [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Downloaded image file data fb764baa-3805-45c4-a694-aa91b0932110 to vmware_temp/379ba144-d3b4-46de-a79e-364ed287ce5a/fb764baa-3805-45c4-a694-aa91b0932110/tmp-sparse.vmdk on the data store datastore2 {{(pid=62096) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 620.229920] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Caching image {{(pid=62096) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 620.230176] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Copying Virtual Disk [datastore2] vmware_temp/379ba144-d3b4-46de-a79e-364ed287ce5a/fb764baa-3805-45c4-a694-aa91b0932110/tmp-sparse.vmdk to [datastore2] vmware_temp/379ba144-d3b4-46de-a79e-364ed287ce5a/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk {{(pid=62096) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 620.230784] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-05440712-2bf0-4c10-bc99-a40c621d0368 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.242261] env[62096]: DEBUG oslo_vmware.api [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Waiting for the task: (returnval){ [ 620.242261] env[62096]: value = "task-397367" [ 620.242261] env[62096]: _type = "Task" [ 620.242261] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.251928] env[62096]: DEBUG oslo_vmware.api [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Task: {'id': task-397367, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.442905] env[62096]: DEBUG nova.compute.utils [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 620.443751] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg e5292a12f7e64a138efab0d3b45d2316 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 620.444585] env[62096]: DEBUG nova.compute.manager [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 620.444695] env[62096]: DEBUG nova.network.neutron [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 620.464974] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e5292a12f7e64a138efab0d3b45d2316 [ 620.519377] env[62096]: DEBUG nova.policy [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eccf4b6b1d2b496796ad12d6caad16ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '53edc9a849714cedab5fcd7b03ca6916', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 620.629817] env[62096]: DEBUG oslo_concurrency.lockutils [None req-bff733c1-b59e-456d-a598-27648ab77c9f tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Lock "6ac3df26-83ab-4519-a1e2-51286c1d1991" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.461s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 620.630460] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Expecting reply to msg c5596b917f154832b94cf2169526fca5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 620.646599] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c5596b917f154832b94cf2169526fca5 [ 620.753253] env[62096]: DEBUG oslo_vmware.api [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Task: {'id': task-397367, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.949766] env[62096]: DEBUG nova.compute.manager [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] Start building block device mappings for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 620.951611] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg cacab8dc06a1433bb2df75f67f1c300a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 620.970975] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65aa75a2-c1e4-4b71-8c92-518151095416 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.978171] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-880e026d-f08b-47e1-a6c8-e664cf9a2733 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.012899] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cacab8dc06a1433bb2df75f67f1c300a [ 621.013825] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17aaf487-f020-4f89-a444-98fbe71b3f9d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.025891] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f02ffa4f-86b2-4ff4-9055-c89b3ec6bf9c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.041237] env[62096]: DEBUG nova.compute.provider_tree [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 621.041780] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Expecting reply to msg 3b0e9ac874e8424bb6a94e3e8c4cf413 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 621.051283] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3b0e9ac874e8424bb6a94e3e8c4cf413 [ 621.139848] env[62096]: DEBUG nova.compute.manager [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] [instance: 06b9105a-8dcb-4bff-bba2-05e179036f24] Starting instance... 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 621.141628] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Expecting reply to msg 1e1b962414184a2ab759fe408b42d309 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 621.210490] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1e1b962414184a2ab759fe408b42d309 [ 621.242283] env[62096]: DEBUG nova.network.neutron [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] Successfully created port: 62ed3475-1d31-4edd-b7c2-33b14a3775c7 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 621.254393] env[62096]: DEBUG oslo_vmware.api [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Task: {'id': task-397367, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.456648] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 79e4dcf3c93c45bfa882e53ca15e0fc3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 621.499715] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 79e4dcf3c93c45bfa882e53ca15e0fc3 [ 621.547798] env[62096]: DEBUG nova.scheduler.client.report [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 621.547798] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Expecting reply to msg fb325b003a5b43a7b06f7c042fa253ca in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 621.563311] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fb325b003a5b43a7b06f7c042fa253ca [ 621.668981] env[62096]: DEBUG oslo_concurrency.lockutils [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 621.755912] env[62096]: DEBUG oslo_vmware.api [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Task: {'id': 
task-397367, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.960607] env[62096]: DEBUG nova.compute.manager [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] Start spawning the instance on the hypervisor. {{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 621.990374] env[62096]: DEBUG nova.virt.hardware [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 621.990597] env[62096]: DEBUG nova.virt.hardware [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 621.990746] env[62096]: DEBUG nova.virt.hardware [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 621.991024] env[62096]: DEBUG nova.virt.hardware [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 621.991090] env[62096]: DEBUG nova.virt.hardware [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 621.991219] env[62096]: DEBUG nova.virt.hardware [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 621.991419] env[62096]: DEBUG nova.virt.hardware [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 621.991588] env[62096]: DEBUG nova.virt.hardware [None 
req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 621.992411] env[62096]: DEBUG nova.virt.hardware [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 621.992411] env[62096]: DEBUG nova.virt.hardware [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 621.992411] env[62096]: DEBUG nova.virt.hardware [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 621.992911] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-825fbb8e-37c1-46b6-8c08-0412066dc32e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.001746] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88a5e9c3-c8e1-4f79-909a-e2ecf92ddae9 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.054973] env[62096]: DEBUG oslo_concurrency.lockutils [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.114s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 622.055498] env[62096]: ERROR nova.compute.manager [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e88b5018-a026-4377-aaa2-562d1c2db094, please check neutron logs for more information. 
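This second build failure (instance 3855c98d-4ade-4f6f-85aa-1297df5a39a9) ends in the same code path as the earlier one for 6ac3df26-83ab-4519-a1e2-51286c1d1991: the traceback that follows terminates in nova/network/neutron.py's _ensure_no_port_binding_failure, which raises PortBindingFailed once Neutron reports the port binding as failed, so the root cause lives on the Neutron side and the message deliberately points at the neutron logs. A simplified reconstruction of that check, with hypothetical class and constant names rather than the exact Nova source:

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs for "
                "more information." % port_id)


    # Neutron marks a port whose binding could not be completed with this
    # vif_type; the check below mirrors what the traceback shows Nova doing.
    VIF_TYPE_BINDING_FAILED = "binding_failed"


    def ensure_no_port_binding_failure(port):
        """Raise if Neutron reported a failed binding for this port."""
        if port.get("binding:vif_type") == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port["id"])


    if __name__ == "__main__":
        good = {"id": "23cb598f-2b29-4f1e-ba72-c92aa3797f30",
                "binding:vif_type": "ovs"}
        bad = {"id": "e88b5018-a026-4377-aaa2-562d1c2db094",
               "binding:vif_type": VIF_TYPE_BINDING_FAILED}
        ensure_no_port_binding_failure(good)   # passes silently
        try:
            ensure_no_port_binding_failure(bad)
        except PortBindingFailed as exc:
            print(exc)

As with the earlier failure, the compute manager then deallocates networking, drops the placement allocations, and reschedules the build rather than failing it outright.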
[ 622.055498] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] Traceback (most recent call last): [ 622.055498] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 622.055498] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] self.driver.spawn(context, instance, image_meta, [ 622.055498] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 622.055498] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 622.055498] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 622.055498] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] vm_ref = self.build_virtual_machine(instance, [ 622.055498] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 622.055498] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] vif_infos = vmwarevif.get_vif_info(self._session, [ 622.055498] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 622.055829] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] for vif in network_info: [ 622.055829] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 622.055829] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] return self._sync_wrapper(fn, *args, **kwargs) [ 622.055829] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 622.055829] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] self.wait() [ 622.055829] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 622.055829] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] self[:] = self._gt.wait() [ 622.055829] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 622.055829] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] return self._exit_event.wait() [ 622.055829] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 622.055829] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] result = hub.switch() [ 622.055829] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
622.055829] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] return self.greenlet.switch() [ 622.056176] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 622.056176] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] result = function(*args, **kwargs) [ 622.056176] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 622.056176] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] return func(*args, **kwargs) [ 622.056176] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 622.056176] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] raise e [ 622.056176] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 622.056176] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] nwinfo = self.network_api.allocate_for_instance( [ 622.056176] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 622.056176] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] created_port_ids = self._update_ports_for_instance( [ 622.056176] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 622.056176] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] with excutils.save_and_reraise_exception(): [ 622.056176] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 622.056487] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] self.force_reraise() [ 622.056487] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 622.056487] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] raise self.value [ 622.056487] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 622.056487] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] updated_port = self._update_port( [ 622.056487] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 622.056487] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] _ensure_no_port_binding_failure(port) [ 622.056487] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 622.056487] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] raise exception.PortBindingFailed(port_id=port['id']) [ 622.056487] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] nova.exception.PortBindingFailed: Binding failed for port e88b5018-a026-4377-aaa2-562d1c2db094, please check neutron logs for more information. [ 622.056487] env[62096]: ERROR nova.compute.manager [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] [ 622.056756] env[62096]: DEBUG nova.compute.utils [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] Binding failed for port e88b5018-a026-4377-aaa2-562d1c2db094, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 622.057477] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.646s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 622.058914] env[62096]: INFO nova.compute.claims [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 622.060841] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Expecting reply to msg 607905cd1ebd40a4b0bab3fc54feb8d0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 622.061785] env[62096]: DEBUG nova.compute.manager [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] Build of instance 3855c98d-4ade-4f6f-85aa-1297df5a39a9 was re-scheduled: Binding failed for port e88b5018-a026-4377-aaa2-562d1c2db094, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 622.062244] env[62096]: DEBUG nova.compute.manager [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 622.062454] env[62096]: DEBUG oslo_concurrency.lockutils [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Acquiring lock "refresh_cache-3855c98d-4ade-4f6f-85aa-1297df5a39a9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 622.062590] env[62096]: DEBUG oslo_concurrency.lockutils [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Acquired lock "refresh_cache-3855c98d-4ade-4f6f-85aa-1297df5a39a9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.062739] env[62096]: DEBUG nova.network.neutron [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 622.063122] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Expecting reply to msg 7488565581ab49f9b800071731b4c1fc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 622.071177] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7488565581ab49f9b800071731b4c1fc [ 622.103713] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 607905cd1ebd40a4b0bab3fc54feb8d0 [ 622.257033] env[62096]: DEBUG oslo_vmware.api [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Task: {'id': task-397367, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.750682} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.257550] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Copied Virtual Disk [datastore2] vmware_temp/379ba144-d3b4-46de-a79e-364ed287ce5a/fb764baa-3805-45c4-a694-aa91b0932110/tmp-sparse.vmdk to [datastore2] vmware_temp/379ba144-d3b4-46de-a79e-364ed287ce5a/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk {{(pid=62096) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 622.257550] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Deleting the datastore file [datastore2] vmware_temp/379ba144-d3b4-46de-a79e-364ed287ce5a/fb764baa-3805-45c4-a694-aa91b0932110/tmp-sparse.vmdk {{(pid=62096) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 622.257802] env[62096]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7c925bdb-6408-4af9-a711-274cb93841c2 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.266764] env[62096]: DEBUG oslo_vmware.api [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Waiting for the task: (returnval){ [ 622.266764] env[62096]: value = "task-397368" [ 622.266764] env[62096]: _type = "Task" [ 622.266764] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.276028] env[62096]: DEBUG oslo_vmware.api [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Task: {'id': task-397368, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.566619] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Expecting reply to msg f37b6303f6a2406dbf818675e809759a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 622.575410] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f37b6303f6a2406dbf818675e809759a [ 622.587417] env[62096]: DEBUG nova.network.neutron [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 622.762582] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Acquiring lock "dafff089-f6e4-4269-bf0e-ea305c11ff36" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 622.762832] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Lock "dafff089-f6e4-4269-bf0e-ea305c11ff36" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 622.777204] env[62096]: DEBUG oslo_vmware.api [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Task: {'id': task-397368, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.024273} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.777497] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Deleted the datastore file {{(pid=62096) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 622.777990] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Moving file from [datastore2] vmware_temp/379ba144-d3b4-46de-a79e-364ed287ce5a/fb764baa-3805-45c4-a694-aa91b0932110 to [datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110. {{(pid=62096) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 622.777990] env[62096]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-9e6c9390-0752-403f-bd97-2e2a355ce232 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.799002] env[62096]: DEBUG oslo_vmware.api [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Waiting for the task: (returnval){ [ 622.799002] env[62096]: value = "task-397369" [ 622.799002] env[62096]: _type = "Task" [ 622.799002] env[62096]: } to complete. 
{{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.800045] env[62096]: DEBUG nova.network.neutron [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 622.800563] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Expecting reply to msg 7e7f1be8c873486487d26eda969b1ab6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 622.811843] env[62096]: DEBUG oslo_vmware.api [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Task: {'id': task-397369, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.812834] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7e7f1be8c873486487d26eda969b1ab6 [ 623.303111] env[62096]: DEBUG oslo_concurrency.lockutils [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Releasing lock "refresh_cache-3855c98d-4ade-4f6f-85aa-1297df5a39a9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 623.303435] env[62096]: DEBUG nova.compute.manager [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 623.303707] env[62096]: DEBUG nova.compute.manager [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 623.303960] env[62096]: DEBUG nova.network.neutron [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 623.317564] env[62096]: DEBUG oslo_vmware.api [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Task: {'id': task-397369, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.04598} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.317801] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] File moved {{(pid=62096) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 623.318000] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Cleaning up location [datastore2] vmware_temp/379ba144-d3b4-46de-a79e-364ed287ce5a {{(pid=62096) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 623.318155] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Deleting the datastore file [datastore2] vmware_temp/379ba144-d3b4-46de-a79e-364ed287ce5a {{(pid=62096) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 623.318402] env[62096]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-58a29317-0dec-4065-9bc7-fd40bff546d4 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.325886] env[62096]: DEBUG oslo_vmware.api [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Waiting for the task: (returnval){ [ 623.325886] env[62096]: value = "task-397370" [ 623.325886] env[62096]: _type = "Task" [ 623.325886] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.338468] env[62096]: DEBUG oslo_vmware.api [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Task: {'id': task-397370, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.339279] env[62096]: DEBUG nova.network.neutron [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 623.340046] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Expecting reply to msg bf5adb2791aa4825b21457c52a53136b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 623.347405] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bf5adb2791aa4825b21457c52a53136b [ 623.563691] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Acquiring lock "7bfac3e2-f06c-4690-9215-a5f67a67c5bd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 623.563691] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Lock "7bfac3e2-f06c-4690-9215-a5f67a67c5bd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 623.573497] env[62096]: ERROR nova.compute.manager [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 62ed3475-1d31-4edd-b7c2-33b14a3775c7, please check neutron logs for more information. 
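The "Binding failed for port ..." errors recorded here (and in the traceback that follows) come out of Nova's Neutron integration layer: after asking Neutron to bind a port to the compute host, Nova inspects the returned port and raises PortBindingFailed if Neutron reports the binding as failed. A minimal illustrative sketch of that style of check, assuming the standard 'binding:vif_type' port attribute; this is not the verbatim Nova source:

    # Illustrative sketch only -- assumes Neutron marks a failed binding by
    # setting the port's 'binding:vif_type' to 'binding_failed', which is the
    # condition the tracebacks in this log point at.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")

    def _ensure_no_port_binding_failure(port):
        # Neutron's mechanism drivers set binding:vif_type once they have
        # tried to bind the port; 'binding_failed' means no driver succeeded.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    try:
        _ensure_no_port_binding_failure(
            {'id': '62ed3475-1d31-4edd-b7c2-33b14a3775c7',
             'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)   # same wording as the ERROR entries in this log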
[ 623.573497] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 623.573497] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 623.573497] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 623.573497] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 623.573497] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 623.573497] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 623.573497] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 623.573497] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 623.573497] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 623.573497] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 623.573497] env[62096]: ERROR nova.compute.manager raise self.value [ 623.573497] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 623.573497] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 623.573497] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 623.573497] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 623.573994] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 623.573994] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 623.573994] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 62ed3475-1d31-4edd-b7c2-33b14a3775c7, please check neutron logs for more information. 
[ 623.573994] env[62096]: ERROR nova.compute.manager [ 623.574389] env[62096]: Traceback (most recent call last): [ 623.574482] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 623.574482] env[62096]: listener.cb(fileno) [ 623.574547] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 623.574547] env[62096]: result = function(*args, **kwargs) [ 623.574619] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 623.574619] env[62096]: return func(*args, **kwargs) [ 623.574762] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 623.574762] env[62096]: raise e [ 623.574845] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 623.574845] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 623.576043] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 623.576043] env[62096]: created_port_ids = self._update_ports_for_instance( [ 623.576139] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 623.576139] env[62096]: with excutils.save_and_reraise_exception(): [ 623.576206] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 623.576206] env[62096]: self.force_reraise() [ 623.576292] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 623.576292] env[62096]: raise self.value [ 623.576377] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 623.576377] env[62096]: updated_port = self._update_port( [ 623.576449] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 623.576449] env[62096]: _ensure_no_port_binding_failure(port) [ 623.576510] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 623.576510] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 623.576603] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 62ed3475-1d31-4edd-b7c2-33b14a3775c7, please check neutron logs for more information. [ 623.576664] env[62096]: Removing descriptor: 14 [ 623.578022] env[62096]: ERROR nova.compute.manager [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 62ed3475-1d31-4edd-b7c2-33b14a3775c7, please check neutron logs for more information. 
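The eventlet frames in the traceback above (and in the per-instance copy that follows) explain why a Neutron binding failure surfaces inside the VMware driver's spawn path rather than at the Neutron call site: network allocation runs asynchronously in a greenthread, and the stored exception is only re-raised when the driver iterates the network_info list in get_vif_info(). A simplified stand-in for that pattern, using concurrent.futures instead of eventlet purely for illustration; this is an assumption-laden sketch, not Nova's actual wrapper:

    # Simplified analogue of a deferred network-info wrapper. The point is
    # that the allocation error is re-raised on first iteration, which is
    # exactly where the spawn() traceback above lands.
    from concurrent.futures import ThreadPoolExecutor

    class AsyncNetworkInfo(list):
        def __init__(self, allocate_fn, *args):
            super().__init__()
            self._executor = ThreadPoolExecutor(max_workers=1)
            self._future = self._executor.submit(allocate_fn, *args)

        def _wait(self):
            if self._future is not None:
                try:
                    self[:] = self._future.result()  # re-raises the allocation error here
                finally:
                    self._future = None
                    self._executor.shutdown(wait=False)

        def __iter__(self):
            self._wait()
            return super().__iter__()

    def allocate_for_instance():
        raise RuntimeError("Binding failed for port ..., please check neutron logs")

    network_info = AsyncNetworkInfo(allocate_for_instance)
    try:
        for vif in network_info:        # spawn-path iteration triggers the error
            pass
    except RuntimeError as exc:
        print("failure only surfaces here:", exc)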
[ 623.578022] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] Traceback (most recent call last): [ 623.578022] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 623.578022] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] yield resources [ 623.578022] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 623.578022] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] self.driver.spawn(context, instance, image_meta, [ 623.578022] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 623.578022] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 623.578022] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 623.578022] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] vm_ref = self.build_virtual_machine(instance, [ 623.578022] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 623.578355] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] vif_infos = vmwarevif.get_vif_info(self._session, [ 623.578355] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 623.578355] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] for vif in network_info: [ 623.578355] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 623.578355] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] return self._sync_wrapper(fn, *args, **kwargs) [ 623.578355] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 623.578355] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] self.wait() [ 623.578355] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 623.578355] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] self[:] = self._gt.wait() [ 623.578355] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 623.578355] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] return self._exit_event.wait() [ 623.578355] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 623.578355] env[62096]: ERROR 
nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] result = hub.switch() [ 623.578682] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 623.578682] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] return self.greenlet.switch() [ 623.578682] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 623.578682] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] result = function(*args, **kwargs) [ 623.578682] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 623.578682] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] return func(*args, **kwargs) [ 623.578682] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 623.578682] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] raise e [ 623.578682] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 623.578682] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] nwinfo = self.network_api.allocate_for_instance( [ 623.578682] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 623.578682] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] created_port_ids = self._update_ports_for_instance( [ 623.578682] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 623.579011] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] with excutils.save_and_reraise_exception(): [ 623.579011] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 623.579011] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] self.force_reraise() [ 623.579011] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 623.579011] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] raise self.value [ 623.579011] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 623.579011] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] updated_port = self._update_port( [ 623.579011] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 623.579011] 
env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] _ensure_no_port_binding_failure(port) [ 623.579011] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 623.579011] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] raise exception.PortBindingFailed(port_id=port['id']) [ 623.579011] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] nova.exception.PortBindingFailed: Binding failed for port 62ed3475-1d31-4edd-b7c2-33b14a3775c7, please check neutron logs for more information. [ 623.579011] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] [ 623.579713] env[62096]: INFO nova.compute.manager [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] Terminating instance [ 623.582723] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Acquiring lock "refresh_cache-805c4c87-4e1a-4733-86a0-4c82daf615eb" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 623.583180] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Acquired lock "refresh_cache-805c4c87-4e1a-4733-86a0-4c82daf615eb" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 623.583719] env[62096]: DEBUG nova.network.neutron [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 623.584332] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg be54f154edd749e4a6078d729f046ef8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 623.592857] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg be54f154edd749e4a6078d729f046ef8 [ 623.623740] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28d7693a-5dfa-43cc-be9c-05dc8ac5c1e9 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.632329] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02db71dc-4b63-4095-8411-5aa034f9c760 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.665726] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6995652-77f4-4c80-9c9a-5f44f6da2ad7 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.674439] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-4b50b88e-cd92-40ad-b4d4-c53ff94ceb1c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.688518] env[62096]: DEBUG nova.compute.provider_tree [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 623.689027] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Expecting reply to msg 337e986deb424cdbb522e9d2bd3e5353 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 623.697693] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 337e986deb424cdbb522e9d2bd3e5353 [ 623.712989] env[62096]: DEBUG nova.compute.manager [req-d7069ec9-ed56-4ed1-9d4f-ffbd313bc539 req-7d4f02c9-0dd6-4013-8939-867769869b92 service nova] [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] Received event network-changed-62ed3475-1d31-4edd-b7c2-33b14a3775c7 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 623.712989] env[62096]: DEBUG nova.compute.manager [req-d7069ec9-ed56-4ed1-9d4f-ffbd313bc539 req-7d4f02c9-0dd6-4013-8939-867769869b92 service nova] [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] Refreshing instance network info cache due to event network-changed-62ed3475-1d31-4edd-b7c2-33b14a3775c7. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 623.712989] env[62096]: DEBUG oslo_concurrency.lockutils [req-d7069ec9-ed56-4ed1-9d4f-ffbd313bc539 req-7d4f02c9-0dd6-4013-8939-867769869b92 service nova] Acquiring lock "refresh_cache-805c4c87-4e1a-4733-86a0-4c82daf615eb" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 623.838921] env[62096]: DEBUG oslo_vmware.api [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Task: {'id': task-397370, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.032226} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.839185] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Deleted the datastore file {{(pid=62096) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 623.839971] env[62096]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0493301c-c724-4a74-9d7a-43fdfe78d1bd {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.842402] env[62096]: DEBUG nova.network.neutron [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 623.843171] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Expecting reply to msg 0a3f045b632f49df93de6462eb7f679c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 623.848130] env[62096]: DEBUG oslo_vmware.api [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Waiting for the task: (returnval){ [ 623.848130] env[62096]: value = "session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]5274488f-9083-4260-491c-55f39b0ff56d" [ 623.848130] env[62096]: _type = "Task" [ 623.848130] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.853955] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0a3f045b632f49df93de6462eb7f679c [ 623.868324] env[62096]: DEBUG oslo_vmware.api [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]5274488f-9083-4260-491c-55f39b0ff56d, 'name': SearchDatastore_Task, 'duration_secs': 0.009922} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.868528] env[62096]: DEBUG oslo_concurrency.lockutils [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Releasing lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 623.869083] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk to [datastore2] 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61/12a4b44d-b6aa-45e2-bba9-d73f41fa4b61.vmdk {{(pid=62096) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 623.869083] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ea9f9476-cefb-4b05-8b36-fabb576f9ebc {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.877433] env[62096]: DEBUG oslo_vmware.api [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Waiting for the task: (returnval){ [ 623.877433] env[62096]: value = "task-397371" [ 623.877433] env[62096]: _type = "Task" [ 623.877433] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.887037] env[62096]: DEBUG oslo_vmware.api [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Task: {'id': task-397371, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.113305] env[62096]: DEBUG nova.network.neutron [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 624.192123] env[62096]: DEBUG nova.scheduler.client.report [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 624.194477] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Expecting reply to msg c5a2bace2a9d4b92bc2c1ea1512286a8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 624.206041] env[62096]: DEBUG nova.network.neutron [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 624.206651] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg e1786bec7874481cb2a304e5355cde17 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 624.219361] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e1786bec7874481cb2a304e5355cde17 [ 624.220197] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c5a2bace2a9d4b92bc2c1ea1512286a8 [ 624.345691] env[62096]: INFO nova.compute.manager [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 3855c98d-4ade-4f6f-85aa-1297df5a39a9] Took 1.04 seconds to deallocate network for instance. [ 624.347589] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Expecting reply to msg 2f1f3e0b962841ef89d2aa9c91302f83 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 624.388346] env[62096]: DEBUG oslo_vmware.api [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Task: {'id': task-397371, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.403522] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2f1f3e0b962841ef89d2aa9c91302f83 [ 624.420761] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Acquiring lock "fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 624.420761] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Lock "fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 624.701886] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.644s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 624.702407] env[62096]: DEBUG nova.compute.manager [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 624.704095] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Expecting reply to msg 975da82611b242e2ad8fefa4d67d78e6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 624.705139] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.226s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 624.706473] env[62096]: INFO nova.compute.claims [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 624.708267] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Expecting reply to msg 78424aef80b3440fbeb29aebe72eb81c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 624.716364] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Releasing lock "refresh_cache-805c4c87-4e1a-4733-86a0-4c82daf615eb" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 624.716364] env[62096]: DEBUG nova.compute.manager [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 624.716364] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 624.716364] env[62096]: DEBUG oslo_concurrency.lockutils [req-d7069ec9-ed56-4ed1-9d4f-ffbd313bc539 req-7d4f02c9-0dd6-4013-8939-867769869b92 service nova] Acquired lock "refresh_cache-805c4c87-4e1a-4733-86a0-4c82daf615eb" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 624.716364] env[62096]: DEBUG nova.network.neutron [req-d7069ec9-ed56-4ed1-9d4f-ffbd313bc539 req-7d4f02c9-0dd6-4013-8939-867769869b92 service nova] [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] Refreshing network info cache for port 62ed3475-1d31-4edd-b7c2-33b14a3775c7 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 624.716583] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-d7069ec9-ed56-4ed1-9d4f-ffbd313bc539 req-7d4f02c9-0dd6-4013-8939-867769869b92 service nova] Expecting reply to msg 9304072d061646778098fc0a684d75a2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 624.716583] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5016be25-a74f-4c60-b2b1-35e96d37bd83 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.721508] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-009c60e6-b6d3-4af1-a8b3-12b9e6097ba1 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.737725] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9304072d061646778098fc0a684d75a2 [ 624.758748] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 78424aef80b3440fbeb29aebe72eb81c [ 624.758748] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 975da82611b242e2ad8fefa4d67d78e6 [ 624.758748] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 805c4c87-4e1a-4733-86a0-4c82daf615eb could not be found. [ 624.758748] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 624.758748] env[62096]: INFO nova.compute.manager [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] Took 0.05 seconds to destroy the instance on the hypervisor. 
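The recurring "Task: {...} progress is N%" / "completed successfully" pairs throughout this log (CopyVirtualDisk_Task, DeleteDatastoreFile_Task, MoveDatastoreFile_Task, ExtendVirtualDisk_Task, ReconfigVM_Task) are produced by polling vCenter tasks until they reach a terminal state. A generic, hedged sketch of that polling loop; oslo.vmware's real wait_for_task differs in detail, and get_task_info() here is a hypothetical caller-supplied callable:

    # Generic polling sketch under assumptions: get_task_info() returns a dict
    # like {'state': 'running', 'progress': 89}. This only shows the pattern
    # behind the repeated "progress is N%" lines above.
    import time

    def wait_for_task(get_task_info, interval=0.5, timeout=300):
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()
            if info['state'] == 'success':
                return info.get('result')
            if info['state'] == 'error':
                raise RuntimeError(info.get('error', 'vCenter task failed'))
            # a real implementation would log "progress is N%" here
            time.sleep(interval)
        raise TimeoutError('task did not reach a terminal state in time')

    # Example usage with a stub that completes immediately:
    print(wait_for_task(lambda: {'state': 'success', 'result': 'task-397371'}))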
[ 624.758748] env[62096]: DEBUG oslo.service.loopingcall [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 624.758748] env[62096]: DEBUG nova.compute.manager [-] [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 624.759434] env[62096]: DEBUG nova.network.neutron [-] [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 624.777758] env[62096]: DEBUG nova.network.neutron [-] [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 624.778433] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg f3f555005cb84a84953c87fff59ae980 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 624.786372] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f3f555005cb84a84953c87fff59ae980 [ 624.853495] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Expecting reply to msg 07649a26631e46deb6ade8ae79349657 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 624.889274] env[62096]: DEBUG oslo_vmware.api [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Task: {'id': task-397371, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.514047} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.889620] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk to [datastore2] 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61/12a4b44d-b6aa-45e2-bba9-d73f41fa4b61.vmdk {{(pid=62096) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 624.889891] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Extending root virtual disk to 1048576 {{(pid=62096) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 624.890195] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9f401689-4642-4d74-bd47-19b22cb24fee {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.898998] env[62096]: DEBUG oslo_vmware.api [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Waiting for the task: (returnval){ [ 624.898998] env[62096]: value = "task-397372" [ 624.898998] env[62096]: _type = "Task" [ 624.898998] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.905276] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 07649a26631e46deb6ade8ae79349657 [ 624.909303] env[62096]: DEBUG oslo_vmware.api [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Task: {'id': task-397372, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.212034] env[62096]: DEBUG nova.compute.utils [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 625.212291] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Expecting reply to msg b001fb6ca0644a77bcd2d1ffa8b7967a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 625.214363] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Expecting reply to msg 82326e93f1354811a2b3cf7f3343f802 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 625.215631] env[62096]: DEBUG nova.compute.manager [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 625.215673] env[62096]: DEBUG nova.network.neutron [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 625.223759] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b001fb6ca0644a77bcd2d1ffa8b7967a [ 625.224522] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 82326e93f1354811a2b3cf7f3343f802 [ 625.243473] env[62096]: DEBUG nova.network.neutron [req-d7069ec9-ed56-4ed1-9d4f-ffbd313bc539 req-7d4f02c9-0dd6-4013-8939-867769869b92 service nova] [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 625.281118] env[62096]: DEBUG nova.network.neutron [-] [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 625.281589] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 4de9d920f8c545409a4b594d094b073d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 625.293509] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4de9d920f8c545409a4b594d094b073d [ 625.335174] env[62096]: DEBUG nova.policy [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5ccdfb211bb74d039d058dab4d4ab0d8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '080433d09b5f4a62a950fd1278aff52a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 625.362867] env[62096]: DEBUG nova.network.neutron [req-d7069ec9-ed56-4ed1-9d4f-ffbd313bc539 req-7d4f02c9-0dd6-4013-8939-867769869b92 service nova] [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 625.362867] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-d7069ec9-ed56-4ed1-9d4f-ffbd313bc539 req-7d4f02c9-0dd6-4013-8939-867769869b92 service nova] Expecting reply to msg 4f606943a25543ffab64ca4441f9b94d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 625.371856] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4f606943a25543ffab64ca4441f9b94d [ 625.389776] env[62096]: INFO nova.scheduler.client.report [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Deleted allocations for instance 3855c98d-4ade-4f6f-85aa-1297df5a39a9 [ 625.397053] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 
tempest-VolumesAdminNegativeTest-2010027559-project-member] Expecting reply to msg 3996114c0c0c45018170de0287c1a378 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 625.415163] env[62096]: DEBUG oslo_vmware.api [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Task: {'id': task-397372, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066008} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.415473] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Extended root virtual disk {{(pid=62096) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 625.417620] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad739337-5332-4f99-b18a-fc89fad2c1f8 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.449849] env[62096]: DEBUG nova.virt.vmwareapi.volumeops [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Reconfiguring VM instance instance-00000013 to attach disk [datastore2] 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61/12a4b44d-b6aa-45e2-bba9-d73f41fa4b61.vmdk or device None with type sparse {{(pid=62096) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 625.450445] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3996114c0c0c45018170de0287c1a378 [ 625.451071] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a9c708ca-6c03-497d-922c-accdcb4eaaf0 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.475813] env[62096]: DEBUG oslo_vmware.api [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Waiting for the task: (returnval){ [ 625.475813] env[62096]: value = "task-397373" [ 625.475813] env[62096]: _type = "Task" [ 625.475813] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.490798] env[62096]: DEBUG oslo_vmware.api [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Task: {'id': task-397373, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.719005] env[62096]: DEBUG nova.compute.manager [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 625.720746] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Expecting reply to msg 4ab3b4915da44bdea91a67ef711d013d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 625.745482] env[62096]: DEBUG nova.compute.manager [req-476be781-2305-4532-9c3f-91e1ff17b997 req-c31938d8-9065-47b2-9485-6a0fd778138b service nova] [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] Received event network-vif-deleted-62ed3475-1d31-4edd-b7c2-33b14a3775c7 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 625.773853] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4ab3b4915da44bdea91a67ef711d013d [ 625.784217] env[62096]: INFO nova.compute.manager [-] [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] Took 1.03 seconds to deallocate network for instance. [ 625.788688] env[62096]: DEBUG nova.network.neutron [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] Successfully created port: 013f7baa-f84e-483f-8fe4-b01cd8c88b38 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 625.790914] env[62096]: DEBUG nova.compute.claims [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 625.791095] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 625.872615] env[62096]: DEBUG oslo_concurrency.lockutils [req-d7069ec9-ed56-4ed1-9d4f-ffbd313bc539 req-7d4f02c9-0dd6-4013-8939-867769869b92 service nova] Releasing lock "refresh_cache-805c4c87-4e1a-4733-86a0-4c82daf615eb" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 625.907706] env[62096]: DEBUG oslo_concurrency.lockutils [None req-8703d55f-3f3e-42bb-b5d4-8086e8f02c90 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Lock "3855c98d-4ade-4f6f-85aa-1297df5a39a9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.722s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 625.907706] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg ba7aff866a4f47f6b3753b449e93f017 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 625.920713] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ba7aff866a4f47f6b3753b449e93f017 [ 625.984791] env[62096]: DEBUG oslo_vmware.api [None req-832b1693-e1b1-43d7-b100-b79b0f216acb 
tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Task: {'id': task-397373, 'name': ReconfigVM_Task, 'duration_secs': 0.283326} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.985101] env[62096]: DEBUG nova.virt.vmwareapi.volumeops [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Reconfigured VM instance instance-00000013 to attach disk [datastore2] 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61/12a4b44d-b6aa-45e2-bba9-d73f41fa4b61.vmdk or device None with type sparse {{(pid=62096) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 625.985841] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-16a3a59b-e0f7-42b6-9a91-7436d0050ff5 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.993750] env[62096]: DEBUG oslo_vmware.api [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Waiting for the task: (returnval){ [ 625.993750] env[62096]: value = "task-397374" [ 625.993750] env[62096]: _type = "Task" [ 625.993750] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.008589] env[62096]: DEBUG oslo_vmware.api [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Task: {'id': task-397374, 'name': Rename_Task} progress is 6%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.229206] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Expecting reply to msg fce1d08f000e49c7afdb5248033321d2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 626.239509] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e92869b9-a8be-4809-9bfc-62045c5b9ea3 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.246772] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aae687e4-ea2d-4ff3-b8f6-5544078dbb21 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.277247] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fce1d08f000e49c7afdb5248033321d2 [ 626.278145] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e87adca-e559-443c-9f8d-431263957308 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.285973] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-019a9c3a-878f-4296-a195-80b74a6beac5 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.299355] env[62096]: DEBUG nova.compute.provider_tree [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 626.299831] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Expecting reply to msg fabdca8cdeb3405ba6a786755927d07d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 626.306499] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fabdca8cdeb3405ba6a786755927d07d [ 626.409221] env[62096]: DEBUG nova.compute.manager [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] Starting instance... 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 626.410971] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 67698a322769413c8fc445f727cc3222 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 626.454413] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 67698a322769413c8fc445f727cc3222 [ 626.503597] env[62096]: DEBUG oslo_vmware.api [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Task: {'id': task-397374, 'name': Rename_Task, 'duration_secs': 0.145546} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.503943] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Powering on the VM {{(pid=62096) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 626.504168] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bfa4b760-b37e-4873-b9a8-f8e939d71c59 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.509998] env[62096]: DEBUG oslo_vmware.api [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Waiting for the task: (returnval){ [ 626.509998] env[62096]: value = "task-397375" [ 626.509998] env[62096]: _type = "Task" [ 626.509998] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.517343] env[62096]: DEBUG oslo_vmware.api [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Task: {'id': task-397375, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.734297] env[62096]: DEBUG nova.compute.manager [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 626.753920] env[62096]: DEBUG nova.virt.hardware [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 626.754235] env[62096]: DEBUG nova.virt.hardware [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 626.754470] env[62096]: DEBUG nova.virt.hardware [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 626.754671] env[62096]: DEBUG nova.virt.hardware [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 626.754814] env[62096]: DEBUG nova.virt.hardware [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 626.755034] env[62096]: DEBUG nova.virt.hardware [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 626.755238] env[62096]: DEBUG nova.virt.hardware [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 626.755388] env[62096]: DEBUG nova.virt.hardware [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 626.755542] env[62096]: DEBUG nova.virt.hardware [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 626.755697] env[62096]: DEBUG nova.virt.hardware [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 626.755859] env[62096]: DEBUG nova.virt.hardware [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 626.756711] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fab92b0f-ef61-4f10-87b2-7985550fea7d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.765244] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-582b3dda-c951-47e3-814c-aafb8ac99d21 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.802155] env[62096]: DEBUG nova.scheduler.client.report [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 626.804882] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Expecting reply to msg 55e85af4acc0449796660206b6ce2741 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 626.816562] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 55e85af4acc0449796660206b6ce2741 [ 626.932825] env[62096]: DEBUG oslo_concurrency.lockutils [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 627.019963] env[62096]: DEBUG oslo_vmware.api [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Task: {'id': task-397375, 'name': PowerOnVM_Task, 'duration_secs': 0.440371} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.020283] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Powered on the VM {{(pid=62096) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 627.020472] env[62096]: INFO nova.compute.manager [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Took 9.64 seconds to spawn the instance on the hypervisor. [ 627.020709] env[62096]: DEBUG nova.compute.manager [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Checking state {{(pid=62096) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 627.021550] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2543552c-4906-4132-9a83-f0acaaa2b971 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.029305] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Expecting reply to msg f790e0791d514162a8c5925b7476b116 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 627.089750] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f790e0791d514162a8c5925b7476b116 [ 627.156267] env[62096]: ERROR nova.compute.manager [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 013f7baa-f84e-483f-8fe4-b01cd8c88b38, please check neutron logs for more information. 
[ 627.156267] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 627.156267] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 627.156267] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 627.156267] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 627.156267] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 627.156267] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 627.156267] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 627.156267] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 627.156267] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 627.156267] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 627.156267] env[62096]: ERROR nova.compute.manager raise self.value [ 627.156267] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 627.156267] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 627.156267] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 627.156267] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 627.156763] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 627.156763] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 627.156763] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 013f7baa-f84e-483f-8fe4-b01cd8c88b38, please check neutron logs for more information. 
[ 627.156763] env[62096]: ERROR nova.compute.manager [ 627.156763] env[62096]: Traceback (most recent call last): [ 627.156763] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 627.156763] env[62096]: listener.cb(fileno) [ 627.156763] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 627.156763] env[62096]: result = function(*args, **kwargs) [ 627.156763] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 627.156763] env[62096]: return func(*args, **kwargs) [ 627.156763] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 627.156763] env[62096]: raise e [ 627.156763] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 627.156763] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 627.156763] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 627.156763] env[62096]: created_port_ids = self._update_ports_for_instance( [ 627.156763] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 627.156763] env[62096]: with excutils.save_and_reraise_exception(): [ 627.156763] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 627.156763] env[62096]: self.force_reraise() [ 627.156763] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 627.156763] env[62096]: raise self.value [ 627.156763] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 627.156763] env[62096]: updated_port = self._update_port( [ 627.156763] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 627.156763] env[62096]: _ensure_no_port_binding_failure(port) [ 627.156763] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 627.156763] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 627.157661] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 013f7baa-f84e-483f-8fe4-b01cd8c88b38, please check neutron logs for more information. [ 627.157661] env[62096]: Removing descriptor: 14 [ 627.157661] env[62096]: ERROR nova.compute.manager [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 013f7baa-f84e-483f-8fe4-b01cd8c88b38, please check neutron logs for more information. 
[ 627.157661] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] Traceback (most recent call last): [ 627.157661] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 627.157661] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] yield resources [ 627.157661] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 627.157661] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] self.driver.spawn(context, instance, image_meta, [ 627.157661] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 627.157661] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 627.157661] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 627.157661] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] vm_ref = self.build_virtual_machine(instance, [ 627.157992] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 627.157992] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] vif_infos = vmwarevif.get_vif_info(self._session, [ 627.157992] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 627.157992] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] for vif in network_info: [ 627.157992] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 627.157992] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] return self._sync_wrapper(fn, *args, **kwargs) [ 627.157992] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 627.157992] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] self.wait() [ 627.157992] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 627.157992] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] self[:] = self._gt.wait() [ 627.157992] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 627.157992] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] return self._exit_event.wait() [ 627.157992] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 627.158289] env[62096]: ERROR 
nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] result = hub.switch() [ 627.158289] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 627.158289] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] return self.greenlet.switch() [ 627.158289] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 627.158289] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] result = function(*args, **kwargs) [ 627.158289] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 627.158289] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] return func(*args, **kwargs) [ 627.158289] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 627.158289] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] raise e [ 627.158289] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 627.158289] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] nwinfo = self.network_api.allocate_for_instance( [ 627.158289] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 627.158289] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] created_port_ids = self._update_ports_for_instance( [ 627.158599] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 627.158599] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] with excutils.save_and_reraise_exception(): [ 627.158599] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 627.158599] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] self.force_reraise() [ 627.158599] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 627.158599] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] raise self.value [ 627.158599] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 627.158599] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] updated_port = self._update_port( [ 627.158599] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 627.158599] 
env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] _ensure_no_port_binding_failure(port) [ 627.158599] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 627.158599] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] raise exception.PortBindingFailed(port_id=port['id']) [ 627.158881] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] nova.exception.PortBindingFailed: Binding failed for port 013f7baa-f84e-483f-8fe4-b01cd8c88b38, please check neutron logs for more information. [ 627.158881] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] [ 627.158881] env[62096]: INFO nova.compute.manager [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] Terminating instance [ 627.159731] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Acquiring lock "refresh_cache-e48a5bde-e299-4567-b952-3c5f096fb65d" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 627.159881] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Acquired lock "refresh_cache-e48a5bde-e299-4567-b952-3c5f096fb65d" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 627.160053] env[62096]: DEBUG nova.network.neutron [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 627.160465] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Expecting reply to msg a50b7adf24b848c4ba1fa40ae1d7c941 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 627.167871] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a50b7adf24b848c4ba1fa40ae1d7c941 [ 627.226882] env[62096]: DEBUG oslo_concurrency.lockutils [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Acquiring lock "510189aa-204c-4fd6-90d5-47a7ce5f7630" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 627.227124] env[62096]: DEBUG oslo_concurrency.lockutils [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Lock "510189aa-204c-4fd6-90d5-47a7ce5f7630" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 
0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 627.307980] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.603s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 627.308510] env[62096]: DEBUG nova.compute.manager [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] Start building networks asynchronously for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 627.310124] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Expecting reply to msg 333212fb653f4cfcb9795ea399e37701 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 627.311125] env[62096]: DEBUG oslo_concurrency.lockutils [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.622s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 627.312877] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Expecting reply to msg ab7957652b1d4ef3a25bc7cf37b85571 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 627.347532] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 333212fb653f4cfcb9795ea399e37701 [ 627.354058] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ab7957652b1d4ef3a25bc7cf37b85571 [ 627.540729] env[62096]: INFO nova.compute.manager [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Took 30.45 seconds to build instance. [ 627.540729] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Expecting reply to msg efb3d7904bf4423bbe2d3e7eb466a2df in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 627.549517] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg efb3d7904bf4423bbe2d3e7eb466a2df [ 627.677623] env[62096]: DEBUG nova.network.neutron [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 627.741694] env[62096]: DEBUG nova.network.neutron [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 627.742246] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Expecting reply to msg bb6cc1597291484990217abb8317e25e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 627.751393] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bb6cc1597291484990217abb8317e25e [ 627.770164] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1c0d1a39-e20c-4bdc-8047-e189fe99c536 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Expecting reply to msg 2edab93e9b16484bb0aacc63541bdb01 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 627.782383] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2edab93e9b16484bb0aacc63541bdb01 [ 627.819109] env[62096]: DEBUG nova.compute.utils [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 627.819894] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Expecting reply to msg 8cdf512621b74a8189ff1e0a5389ab4d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 627.821192] env[62096]: DEBUG nova.compute.manager [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 627.821534] env[62096]: DEBUG nova.network.neutron [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 627.834797] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8cdf512621b74a8189ff1e0a5389ab4d [ 627.873863] env[62096]: DEBUG nova.policy [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ca7b4d1e50d84e229c34d97fbe2fb067', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5d5d111bf6b141abaea02ba14668f5e6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 627.905135] env[62096]: DEBUG nova.compute.manager [req-71da149f-f742-40cc-8d09-9b342dd5340c req-15d02e32-9593-45c2-b330-e4477a3454ea service nova] [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] Received event network-changed-013f7baa-f84e-483f-8fe4-b01cd8c88b38 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 627.905135] env[62096]: DEBUG nova.compute.manager [req-71da149f-f742-40cc-8d09-9b342dd5340c req-15d02e32-9593-45c2-b330-e4477a3454ea service nova] [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] Refreshing instance network info cache due to event network-changed-013f7baa-f84e-483f-8fe4-b01cd8c88b38. 
{{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 627.905135] env[62096]: DEBUG oslo_concurrency.lockutils [req-71da149f-f742-40cc-8d09-9b342dd5340c req-15d02e32-9593-45c2-b330-e4477a3454ea service nova] Acquiring lock "refresh_cache-e48a5bde-e299-4567-b952-3c5f096fb65d" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 628.002024] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Expecting reply to msg ac2e3452ace04e3f8e25cc22d0fd20c2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 628.012474] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac2e3452ace04e3f8e25cc22d0fd20c2 [ 628.039969] env[62096]: DEBUG oslo_concurrency.lockutils [None req-832b1693-e1b1-43d7-b100-b79b0f216acb tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Lock "12a4b44d-b6aa-45e2-bba9-d73f41fa4b61" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.916s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 628.040599] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg aa1b0a0d2ff747669d9d3e2854d533c2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 628.059337] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aa1b0a0d2ff747669d9d3e2854d533c2 [ 628.183068] env[62096]: DEBUG nova.network.neutron [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] Successfully created port: ec4bdedd-76f7-4dad-92db-f301287338c1 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 628.246710] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Releasing lock "refresh_cache-e48a5bde-e299-4567-b952-3c5f096fb65d" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 628.247140] env[62096]: DEBUG nova.compute.manager [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 628.247339] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 628.247819] env[62096]: DEBUG oslo_concurrency.lockutils [req-71da149f-f742-40cc-8d09-9b342dd5340c req-15d02e32-9593-45c2-b330-e4477a3454ea service nova] Acquired lock "refresh_cache-e48a5bde-e299-4567-b952-3c5f096fb65d" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 628.247986] env[62096]: DEBUG nova.network.neutron [req-71da149f-f742-40cc-8d09-9b342dd5340c req-15d02e32-9593-45c2-b330-e4477a3454ea service nova] [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] Refreshing network info cache for port 013f7baa-f84e-483f-8fe4-b01cd8c88b38 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 628.248410] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-71da149f-f742-40cc-8d09-9b342dd5340c req-15d02e32-9593-45c2-b330-e4477a3454ea service nova] Expecting reply to msg 0b0f4f6d4b764639b13e283bc4fe8a4f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 628.249174] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-37078b53-3e62-423a-8909-3ca225e1d3d2 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.258828] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0b0f4f6d4b764639b13e283bc4fe8a4f [ 628.266941] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bb1a7f6-1e28-4deb-a680-5faedfdf5271 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.280482] env[62096]: DEBUG nova.compute.manager [None req-1c0d1a39-e20c-4bdc-8047-e189fe99c536 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Checking state {{(pid=62096) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 628.281511] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf3c0c82-0cbb-418d-b3ef-f3779aaae8a4 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.288888] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1c0d1a39-e20c-4bdc-8047-e189fe99c536 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Expecting reply to msg b616600f31ec40149b5483bdbc16401b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 628.292266] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eceb917-3626-4658-ac67-c6ec83fd9033 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.299540] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] Instance does not exist on backend: 
nova.exception.InstanceNotFound: Instance e48a5bde-e299-4567-b952-3c5f096fb65d could not be found. [ 628.299754] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 628.299933] env[62096]: INFO nova.compute.manager [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] Took 0.05 seconds to destroy the instance on the hypervisor. [ 628.300184] env[62096]: DEBUG oslo.service.loopingcall [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 628.300716] env[62096]: DEBUG nova.compute.manager [-] [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 628.300805] env[62096]: DEBUG nova.network.neutron [-] [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 628.304883] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcad60ef-8f21-4820-8767-c82d6a9560fe {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.333701] env[62096]: DEBUG nova.compute.manager [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] Start building block device mappings for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 628.335651] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Expecting reply to msg 7cb9b4c3c9064a5a9fddc9f05b0ec767 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 628.337399] env[62096]: DEBUG nova.network.neutron [-] [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 628.337842] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 12ec54e25a7b4c1aa4a2bc5d27c71f77 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 628.338725] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b616600f31ec40149b5483bdbc16401b [ 628.339652] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cdad3cd-99d3-4487-a48d-24b59b461a2e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.346906] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 12ec54e25a7b4c1aa4a2bc5d27c71f77 [ 628.348622] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90693274-91b2-4b5f-a230-ab2665f03142 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.362562] env[62096]: DEBUG nova.compute.provider_tree [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 628.363087] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Expecting reply to msg d9ac44dcb2e0455a9d95c970f522f660 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 628.370550] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7cb9b4c3c9064a5a9fddc9f05b0ec767 [ 628.373006] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d9ac44dcb2e0455a9d95c970f522f660 [ 628.503485] env[62096]: DEBUG oslo_concurrency.lockutils [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Acquiring lock "12a4b44d-b6aa-45e2-bba9-d73f41fa4b61" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 628.503744] env[62096]: DEBUG oslo_concurrency.lockutils [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Lock "12a4b44d-b6aa-45e2-bba9-d73f41fa4b61" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 628.504031] env[62096]: DEBUG oslo_concurrency.lockutils [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Acquiring lock "12a4b44d-b6aa-45e2-bba9-d73f41fa4b61-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 628.504638] env[62096]: DEBUG oslo_concurrency.lockutils [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 
tempest-ServersAaction247Test-1444232804-project-member] Lock "12a4b44d-b6aa-45e2-bba9-d73f41fa4b61-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 628.504638] env[62096]: DEBUG oslo_concurrency.lockutils [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Lock "12a4b44d-b6aa-45e2-bba9-d73f41fa4b61-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 628.506528] env[62096]: INFO nova.compute.manager [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Terminating instance [ 628.508188] env[62096]: DEBUG oslo_concurrency.lockutils [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Acquiring lock "refresh_cache-12a4b44d-b6aa-45e2-bba9-d73f41fa4b61" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 628.508369] env[62096]: DEBUG oslo_concurrency.lockutils [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Acquired lock "refresh_cache-12a4b44d-b6aa-45e2-bba9-d73f41fa4b61" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 628.508587] env[62096]: DEBUG nova.network.neutron [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 628.509017] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Expecting reply to msg 83eab811231643d5a0c48b072a508364 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 628.515523] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 83eab811231643d5a0c48b072a508364 [ 628.542846] env[62096]: DEBUG nova.compute.manager [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] Starting instance... 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 628.544584] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 9a89b5494272406d9f3a938a2b474124 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 628.576849] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9a89b5494272406d9f3a938a2b474124 [ 628.786918] env[62096]: DEBUG nova.network.neutron [req-71da149f-f742-40cc-8d09-9b342dd5340c req-15d02e32-9593-45c2-b330-e4477a3454ea service nova] [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 628.800601] env[62096]: INFO nova.compute.manager [None req-1c0d1a39-e20c-4bdc-8047-e189fe99c536 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] instance snapshotting [ 628.801201] env[62096]: DEBUG nova.objects.instance [None req-1c0d1a39-e20c-4bdc-8047-e189fe99c536 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Lazy-loading 'flavor' on Instance uuid 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61 {{(pid=62096) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 628.801841] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1c0d1a39-e20c-4bdc-8047-e189fe99c536 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Expecting reply to msg 4f5538e018684234b8deca0b4cf68a07 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 628.835754] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4f5538e018684234b8deca0b4cf68a07 [ 628.846975] env[62096]: DEBUG nova.network.neutron [-] [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 628.847515] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 967fe53304c44fdc8c662c8145d5acac in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 628.854538] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Expecting reply to msg 1ab4d07e9e6a450ca79b92628a9a4dc4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 628.860305] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 967fe53304c44fdc8c662c8145d5acac [ 628.865118] env[62096]: DEBUG nova.scheduler.client.report [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:940}} [ 628.867781] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Expecting reply to msg df76474161c149348ba31cf98e62c61a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 628.884975] env[62096]: DEBUG nova.network.neutron [req-71da149f-f742-40cc-8d09-9b342dd5340c req-15d02e32-9593-45c2-b330-e4477a3454ea service nova] [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 628.885557] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-71da149f-f742-40cc-8d09-9b342dd5340c req-15d02e32-9593-45c2-b330-e4477a3454ea service nova] Expecting reply to msg a0261167d6124c5bab83f62c6182caab in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 628.887910] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg df76474161c149348ba31cf98e62c61a [ 628.899074] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a0261167d6124c5bab83f62c6182caab [ 628.914463] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1ab4d07e9e6a450ca79b92628a9a4dc4 [ 629.043743] env[62096]: DEBUG nova.network.neutron [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 629.060713] env[62096]: ERROR nova.compute.manager [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ec4bdedd-76f7-4dad-92db-f301287338c1, please check neutron logs for more information. 
[ 629.060713] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 629.060713] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 629.060713] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 629.060713] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 629.060713] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 629.060713] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 629.060713] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 629.060713] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 629.060713] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 629.060713] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 629.060713] env[62096]: ERROR nova.compute.manager raise self.value [ 629.060713] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 629.060713] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 629.060713] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 629.060713] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 629.061152] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 629.061152] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 629.061152] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port ec4bdedd-76f7-4dad-92db-f301287338c1, please check neutron logs for more information. 
[ 629.061152] env[62096]: ERROR nova.compute.manager [ 629.061152] env[62096]: Traceback (most recent call last): [ 629.061152] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 629.061152] env[62096]: listener.cb(fileno) [ 629.061152] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 629.061152] env[62096]: result = function(*args, **kwargs) [ 629.061152] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 629.061152] env[62096]: return func(*args, **kwargs) [ 629.061152] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 629.061152] env[62096]: raise e [ 629.061152] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 629.061152] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 629.061152] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 629.061152] env[62096]: created_port_ids = self._update_ports_for_instance( [ 629.061152] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 629.061152] env[62096]: with excutils.save_and_reraise_exception(): [ 629.061152] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 629.061152] env[62096]: self.force_reraise() [ 629.061152] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 629.061152] env[62096]: raise self.value [ 629.061152] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 629.061152] env[62096]: updated_port = self._update_port( [ 629.061152] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 629.061152] env[62096]: _ensure_no_port_binding_failure(port) [ 629.061152] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 629.061152] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 629.061860] env[62096]: nova.exception.PortBindingFailed: Binding failed for port ec4bdedd-76f7-4dad-92db-f301287338c1, please check neutron logs for more information. 
[ 629.061860] env[62096]: Removing descriptor: 14 [ 629.070694] env[62096]: DEBUG oslo_concurrency.lockutils [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 629.143740] env[62096]: DEBUG nova.network.neutron [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 629.145648] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Expecting reply to msg 2a833870cd38467ebef8966f0d86a676 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 629.169828] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2a833870cd38467ebef8966f0d86a676 [ 629.306769] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41870a40-39a0-4064-8810-c0afee846073 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.324656] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ecd01f6-3622-43f4-9bf0-58c188bd6bc9 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.332809] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1c0d1a39-e20c-4bdc-8047-e189fe99c536 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Expecting reply to msg f602664fb79f427fbdd3ac1d1b742d5c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 629.357539] env[62096]: INFO nova.compute.manager [-] [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] Took 1.06 seconds to deallocate network for instance. [ 629.359142] env[62096]: DEBUG nova.compute.manager [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 629.364045] env[62096]: DEBUG nova.compute.claims [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 629.364291] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 629.364775] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f602664fb79f427fbdd3ac1d1b742d5c [ 629.370097] env[62096]: DEBUG oslo_concurrency.lockutils [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.059s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 629.370730] env[62096]: ERROR nova.compute.manager [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] [instance: 9bc49e7a-af22-4112-8609-348605599692] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e34eb22a-a84c-4ad4-be42-ea372c508428, please check neutron logs for more information. 
[ 629.370730] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] Traceback (most recent call last): [ 629.370730] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 629.370730] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] self.driver.spawn(context, instance, image_meta, [ 629.370730] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 629.370730] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] self._vmops.spawn(context, instance, image_meta, injected_files, [ 629.370730] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 629.370730] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] vm_ref = self.build_virtual_machine(instance, [ 629.370730] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 629.370730] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] vif_infos = vmwarevif.get_vif_info(self._session, [ 629.370730] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 629.371090] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] for vif in network_info: [ 629.371090] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 629.371090] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] return self._sync_wrapper(fn, *args, **kwargs) [ 629.371090] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 629.371090] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] self.wait() [ 629.371090] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 629.371090] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] self[:] = self._gt.wait() [ 629.371090] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 629.371090] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] return self._exit_event.wait() [ 629.371090] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 629.371090] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] result = hub.switch() [ 629.371090] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
629.371090] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] return self.greenlet.switch() [ 629.371457] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 629.371457] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] result = function(*args, **kwargs) [ 629.371457] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 629.371457] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] return func(*args, **kwargs) [ 629.371457] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 629.371457] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] raise e [ 629.371457] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 629.371457] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] nwinfo = self.network_api.allocate_for_instance( [ 629.371457] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 629.371457] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] created_port_ids = self._update_ports_for_instance( [ 629.371457] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 629.371457] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] with excutils.save_and_reraise_exception(): [ 629.371457] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 629.371792] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] self.force_reraise() [ 629.371792] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 629.371792] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] raise self.value [ 629.371792] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 629.371792] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] updated_port = self._update_port( [ 629.371792] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 629.371792] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] _ensure_no_port_binding_failure(port) [ 629.371792] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 629.371792] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] raise exception.PortBindingFailed(port_id=port['id']) [ 629.371792] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] nova.exception.PortBindingFailed: Binding failed for port e34eb22a-a84c-4ad4-be42-ea372c508428, please check neutron logs for more information. [ 629.371792] env[62096]: ERROR nova.compute.manager [instance: 9bc49e7a-af22-4112-8609-348605599692] [ 629.372118] env[62096]: DEBUG nova.compute.utils [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] [instance: 9bc49e7a-af22-4112-8609-348605599692] Binding failed for port e34eb22a-a84c-4ad4-be42-ea372c508428, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 629.372757] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.121s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 629.374237] env[62096]: INFO nova.compute.claims [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: 27d12301-a049-4d1e-b171-a09a642703fb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 629.375802] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg db1bfd5cd33f44dda86c880afa223b97 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 629.380019] env[62096]: DEBUG nova.compute.manager [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] [instance: 9bc49e7a-af22-4112-8609-348605599692] Build of instance 9bc49e7a-af22-4112-8609-348605599692 was re-scheduled: Binding failed for port e34eb22a-a84c-4ad4-be42-ea372c508428, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 629.380457] env[62096]: DEBUG nova.compute.manager [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] [instance: 9bc49e7a-af22-4112-8609-348605599692] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 629.380670] env[62096]: DEBUG oslo_concurrency.lockutils [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Acquiring lock "refresh_cache-9bc49e7a-af22-4112-8609-348605599692" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 629.384237] env[62096]: DEBUG oslo_concurrency.lockutils [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Acquired lock "refresh_cache-9bc49e7a-af22-4112-8609-348605599692" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 629.384446] env[62096]: DEBUG nova.network.neutron [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] [instance: 9bc49e7a-af22-4112-8609-348605599692] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 629.384917] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Expecting reply to msg 1fd2e83b6dc34b66b668c2e925f46cb2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 629.388687] env[62096]: DEBUG oslo_concurrency.lockutils [req-71da149f-f742-40cc-8d09-9b342dd5340c req-15d02e32-9593-45c2-b330-e4477a3454ea service nova] Releasing lock "refresh_cache-e48a5bde-e299-4567-b952-3c5f096fb65d" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 629.388687] env[62096]: DEBUG nova.compute.manager [req-71da149f-f742-40cc-8d09-9b342dd5340c req-15d02e32-9593-45c2-b330-e4477a3454ea service nova] [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] Received event network-vif-deleted-013f7baa-f84e-483f-8fe4-b01cd8c88b38 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 629.391895] env[62096]: DEBUG nova.virt.hardware [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 629.392121] env[62096]: DEBUG nova.virt.hardware [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 629.392270] env[62096]: DEBUG nova.virt.hardware [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 629.392445] env[62096]: DEBUG nova.virt.hardware [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 629.392587] env[62096]: DEBUG nova.virt.hardware [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 629.392731] env[62096]: DEBUG nova.virt.hardware [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 629.392930] env[62096]: DEBUG nova.virt.hardware [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 629.393081] env[62096]: DEBUG nova.virt.hardware [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 629.393237] env[62096]: DEBUG nova.virt.hardware [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 629.393390] env[62096]: DEBUG nova.virt.hardware [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 
629.393551] env[62096]: DEBUG nova.virt.hardware [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 629.394928] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b3a5789-8182-4b7a-9ce3-c76ec71517ae {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.397984] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1fd2e83b6dc34b66b668c2e925f46cb2 [ 629.414757] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f4f1b21-40ee-4c03-9944-ed576b3f368e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.420657] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg db1bfd5cd33f44dda86c880afa223b97 [ 629.431924] env[62096]: ERROR nova.compute.manager [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ec4bdedd-76f7-4dad-92db-f301287338c1, please check neutron logs for more information. [ 629.431924] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] Traceback (most recent call last): [ 629.431924] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 629.431924] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] yield resources [ 629.431924] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 629.431924] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] self.driver.spawn(context, instance, image_meta, [ 629.431924] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 629.431924] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 629.431924] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 629.431924] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] vm_ref = self.build_virtual_machine(instance, [ 629.431924] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 629.432300] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] vif_infos = vmwarevif.get_vif_info(self._session, [ 629.432300] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 
629.432300] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] for vif in network_info: [ 629.432300] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 629.432300] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] return self._sync_wrapper(fn, *args, **kwargs) [ 629.432300] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 629.432300] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] self.wait() [ 629.432300] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 629.432300] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] self[:] = self._gt.wait() [ 629.432300] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 629.432300] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] return self._exit_event.wait() [ 629.432300] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 629.432300] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] current.throw(*self._exc) [ 629.432610] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 629.432610] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] result = function(*args, **kwargs) [ 629.432610] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 629.432610] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] return func(*args, **kwargs) [ 629.432610] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 629.432610] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] raise e [ 629.432610] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 629.432610] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] nwinfo = self.network_api.allocate_for_instance( [ 629.432610] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 629.432610] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] created_port_ids = self._update_ports_for_instance( [ 629.432610] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 629.432610] env[62096]: ERROR nova.compute.manager 
[instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] with excutils.save_and_reraise_exception(): [ 629.432610] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 629.432929] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] self.force_reraise() [ 629.432929] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 629.432929] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] raise self.value [ 629.432929] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 629.432929] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] updated_port = self._update_port( [ 629.432929] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 629.432929] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] _ensure_no_port_binding_failure(port) [ 629.432929] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 629.432929] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] raise exception.PortBindingFailed(port_id=port['id']) [ 629.432929] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] nova.exception.PortBindingFailed: Binding failed for port ec4bdedd-76f7-4dad-92db-f301287338c1, please check neutron logs for more information. 
[ 629.432929] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] [ 629.432929] env[62096]: INFO nova.compute.manager [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] Terminating instance [ 629.434325] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Acquiring lock "refresh_cache-69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 629.434487] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Acquired lock "refresh_cache-69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 629.434652] env[62096]: DEBUG nova.network.neutron [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 629.435105] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Expecting reply to msg 6b2791b2e3604985b667afd07fc4fbdc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 629.442876] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6b2791b2e3604985b667afd07fc4fbdc [ 629.653800] env[62096]: DEBUG oslo_concurrency.lockutils [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Releasing lock "refresh_cache-12a4b44d-b6aa-45e2-bba9-d73f41fa4b61" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 629.654301] env[62096]: DEBUG nova.compute.manager [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 629.654491] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 629.655457] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c0ef631-000e-4ed5-9f27-1db6d6855497 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.663313] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Powering off the VM {{(pid=62096) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 629.664034] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6e248042-c204-4c8a-a9c7-947d29007b20 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.669296] env[62096]: DEBUG oslo_vmware.api [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Waiting for the task: (returnval){ [ 629.669296] env[62096]: value = "task-397376" [ 629.669296] env[62096]: _type = "Task" [ 629.669296] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.678529] env[62096]: DEBUG oslo_vmware.api [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Task: {'id': task-397376, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.836990] env[62096]: DEBUG nova.compute.manager [None req-1c0d1a39-e20c-4bdc-8047-e189fe99c536 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Instance disappeared during snapshot {{(pid=62096) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4494}} [ 629.888559] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg 771f75471eb14290bff46e2d2e48eeb1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 629.899029] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 771f75471eb14290bff46e2d2e48eeb1 [ 629.941397] env[62096]: DEBUG nova.network.neutron [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] [instance: 9bc49e7a-af22-4112-8609-348605599692] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 629.960855] env[62096]: DEBUG nova.network.neutron [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 629.994790] env[62096]: DEBUG nova.compute.manager [None req-1c0d1a39-e20c-4bdc-8047-e189fe99c536 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Found 0 images (rotation: 2) {{(pid=62096) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4554}} [ 629.995676] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1c0d1a39-e20c-4bdc-8047-e189fe99c536 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Expecting reply to msg 6bf2ca676e8a47bbac6d188656a641af in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 630.004288] env[62096]: DEBUG nova.compute.manager [req-7d0ddf08-4293-4369-b4cd-02ef9925433c req-0c3fb998-4816-4509-8c3c-bca289b78b43 service nova] [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] Received event network-changed-ec4bdedd-76f7-4dad-92db-f301287338c1 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 630.004422] env[62096]: DEBUG nova.compute.manager [req-7d0ddf08-4293-4369-b4cd-02ef9925433c req-0c3fb998-4816-4509-8c3c-bca289b78b43 service nova] [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] Refreshing instance network info cache due to event network-changed-ec4bdedd-76f7-4dad-92db-f301287338c1. 
{{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 630.004639] env[62096]: DEBUG oslo_concurrency.lockutils [req-7d0ddf08-4293-4369-b4cd-02ef9925433c req-0c3fb998-4816-4509-8c3c-bca289b78b43 service nova] Acquiring lock "refresh_cache-69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 630.009223] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6bf2ca676e8a47bbac6d188656a641af [ 630.056339] env[62096]: DEBUG nova.network.neutron [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] [instance: 9bc49e7a-af22-4112-8609-348605599692] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 630.057372] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Expecting reply to msg ea2e92073c384b669cef8f4609073606 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 630.070219] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ea2e92073c384b669cef8f4609073606 [ 630.072587] env[62096]: DEBUG nova.network.neutron [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 630.073077] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Expecting reply to msg 2cc64028a8cb4aa79bb6d8e5cfe5e42d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 630.081591] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2cc64028a8cb4aa79bb6d8e5cfe5e42d [ 630.179835] env[62096]: DEBUG oslo_vmware.api [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Task: {'id': task-397376, 'name': PowerOffVM_Task, 'duration_secs': 0.114752} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.182540] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Powered off the VM {{(pid=62096) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 630.182707] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Unregistering the VM {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 630.183132] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f1afd3f1-77b7-4ed8-957a-d4b4bb72e40c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.204969] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Unregistered the VM {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 630.205175] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Deleting contents of the VM from datastore datastore2 {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 630.205349] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Deleting the datastore file [datastore2] 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61 {{(pid=62096) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 630.205595] env[62096]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5892147e-099a-44ad-9f9f-6e10755b3770 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.211740] env[62096]: DEBUG oslo_vmware.api [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Waiting for the task: (returnval){ [ 630.211740] env[62096]: value = "task-397378" [ 630.211740] env[62096]: _type = "Task" [ 630.211740] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.220880] env[62096]: DEBUG oslo_vmware.api [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Task: {'id': task-397378, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.370685] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b8b398c-2824-413b-9edb-8b066ed03aed {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.378170] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d73a0f4-5f13-4b23-ab71-3b519986a589 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.409092] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17ba3b99-0154-4986-9359-190e7161df66 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.417434] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e09b6b9-c9a3-446b-972a-da4fdb91ccca {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.431994] env[62096]: DEBUG nova.compute.provider_tree [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 630.432530] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg 1853b997887648d78be8970aab221825 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 630.439374] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1853b997887648d78be8970aab221825 [ 630.564125] env[62096]: DEBUG oslo_concurrency.lockutils [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Releasing lock "refresh_cache-9bc49e7a-af22-4112-8609-348605599692" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 630.564371] env[62096]: DEBUG nova.compute.manager [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 630.564536] env[62096]: DEBUG nova.compute.manager [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] [instance: 9bc49e7a-af22-4112-8609-348605599692] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 630.564693] env[62096]: DEBUG nova.network.neutron [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] [instance: 9bc49e7a-af22-4112-8609-348605599692] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 630.575576] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Releasing lock "refresh_cache-69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 630.575957] env[62096]: DEBUG nova.compute.manager [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 630.576172] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 630.576493] env[62096]: DEBUG oslo_concurrency.lockutils [req-7d0ddf08-4293-4369-b4cd-02ef9925433c req-0c3fb998-4816-4509-8c3c-bca289b78b43 service nova] Acquired lock "refresh_cache-69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 630.576799] env[62096]: DEBUG nova.network.neutron [req-7d0ddf08-4293-4369-b4cd-02ef9925433c req-0c3fb998-4816-4509-8c3c-bca289b78b43 service nova] [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] Refreshing network info cache for port ec4bdedd-76f7-4dad-92db-f301287338c1 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 630.577239] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-7d0ddf08-4293-4369-b4cd-02ef9925433c req-0c3fb998-4816-4509-8c3c-bca289b78b43 service nova] Expecting reply to msg 2f268a399cfe4101b227ff795c7f03aa in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 630.578517] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-469550f4-f4c8-4c5c-a26c-80405ca18b1f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.585854] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2f268a399cfe4101b227ff795c7f03aa [ 630.588814] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ca8043b-089c-4dab-a813-7ca94519eb1f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.609851] 
env[62096]: DEBUG nova.network.neutron [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] [instance: 9bc49e7a-af22-4112-8609-348605599692] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 630.610419] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Expecting reply to msg 451eb7c99fc14e7f80bd9ba9c2e22859 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 630.617871] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 451eb7c99fc14e7f80bd9ba9c2e22859 [ 630.624782] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb could not be found. [ 630.624909] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 630.625082] env[62096]: INFO nova.compute.manager [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] Took 0.05 seconds to destroy the instance on the hypervisor. [ 630.625312] env[62096]: DEBUG oslo.service.loopingcall [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 630.625608] env[62096]: DEBUG nova.compute.manager [-] [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 630.625608] env[62096]: DEBUG nova.network.neutron [-] [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 630.641721] env[62096]: DEBUG nova.network.neutron [-] [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 630.642215] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg f858c5c5c30d475e9ba218a5ecccf956 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 630.654815] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f858c5c5c30d475e9ba218a5ecccf956 [ 630.725836] env[62096]: DEBUG oslo_vmware.api [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Task: {'id': task-397378, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.096986} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.726110] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Deleted the datastore file {{(pid=62096) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 630.726333] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Deleted contents of the VM from datastore datastore2 {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 630.726485] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 630.726649] env[62096]: INFO nova.compute.manager [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Took 1.07 seconds to destroy the instance on the hypervisor. [ 630.727221] env[62096]: DEBUG oslo.service.loopingcall [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 630.727408] env[62096]: DEBUG nova.compute.manager [-] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 630.727497] env[62096]: DEBUG nova.network.neutron [-] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 630.743135] env[62096]: DEBUG nova.network.neutron [-] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 630.743767] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 3524926f75e14a1aa623ff336c4d203a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 630.750793] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3524926f75e14a1aa623ff336c4d203a [ 630.935288] env[62096]: DEBUG nova.scheduler.client.report [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 630.939256] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg 1875f689159e41a89a5f754a5f7b87ff in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 630.951118] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1875f689159e41a89a5f754a5f7b87ff [ 631.093692] env[62096]: DEBUG nova.network.neutron [req-7d0ddf08-4293-4369-b4cd-02ef9925433c req-0c3fb998-4816-4509-8c3c-bca289b78b43 service nova] [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 631.113162] env[62096]: DEBUG nova.network.neutron [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] [instance: 9bc49e7a-af22-4112-8609-348605599692] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 631.113713] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Expecting reply to msg 880a216cb5464c47a0aed9a640353830 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 631.123273] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 880a216cb5464c47a0aed9a640353830 [ 631.144482] env[62096]: DEBUG nova.network.neutron [-] [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 631.144923] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 070ed9bbed07472da8b514a628d7ee55 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 631.157209] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 070ed9bbed07472da8b514a628d7ee55 [ 631.185754] env[62096]: DEBUG nova.network.neutron [req-7d0ddf08-4293-4369-b4cd-02ef9925433c req-0c3fb998-4816-4509-8c3c-bca289b78b43 service nova] [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 631.185754] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-7d0ddf08-4293-4369-b4cd-02ef9925433c req-0c3fb998-4816-4509-8c3c-bca289b78b43 service nova] Expecting reply to msg c94972ca6aa24a748c46711d76a2c1da in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 631.192974] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c94972ca6aa24a748c46711d76a2c1da [ 631.248026] env[62096]: DEBUG nova.network.neutron [-] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 631.248026] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 5378544a2dbf44b4b420998346136f5c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 631.256104] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5378544a2dbf44b4b420998346136f5c [ 631.448507] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.069s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 631.448507] env[62096]: DEBUG nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: 27d12301-a049-4d1e-b171-a09a642703fb] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 631.448507] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg 0a75bc2575ab45c783d7e51593673af9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 631.448507] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.079s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 631.448507] env[62096]: INFO nova.compute.claims [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 631.450033] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg 15467889fafe4dc79bc9defba301eb6c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 631.504476] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0a75bc2575ab45c783d7e51593673af9 [ 631.505643] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 15467889fafe4dc79bc9defba301eb6c [ 631.615857] env[62096]: INFO nova.compute.manager [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] [instance: 9bc49e7a-af22-4112-8609-348605599692] Took 1.05 seconds to deallocate network for instance. [ 631.617518] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Expecting reply to msg fe9907d49dec4544ab10a1402d7ff0b3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 631.646997] env[62096]: INFO nova.compute.manager [-] [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] Took 1.02 seconds to deallocate network for instance. 
[ 631.649358] env[62096]: DEBUG nova.compute.claims [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 631.649535] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 631.655334] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fe9907d49dec4544ab10a1402d7ff0b3 [ 631.687249] env[62096]: DEBUG oslo_concurrency.lockutils [req-7d0ddf08-4293-4369-b4cd-02ef9925433c req-0c3fb998-4816-4509-8c3c-bca289b78b43 service nova] Releasing lock "refresh_cache-69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 631.687506] env[62096]: DEBUG nova.compute.manager [req-7d0ddf08-4293-4369-b4cd-02ef9925433c req-0c3fb998-4816-4509-8c3c-bca289b78b43 service nova] [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] Received event network-vif-deleted-ec4bdedd-76f7-4dad-92db-f301287338c1 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 631.750097] env[62096]: INFO nova.compute.manager [-] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Took 1.02 seconds to deallocate network for instance. [ 631.754727] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Expecting reply to msg 41633d675f544e539199dc03638d5b25 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 631.801135] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 41633d675f544e539199dc03638d5b25 [ 631.953491] env[62096]: DEBUG nova.compute.utils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 631.954132] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg bed693387d154768972ef7abe3875834 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 631.956480] env[62096]: DEBUG nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: 27d12301-a049-4d1e-b171-a09a642703fb] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 631.956480] env[62096]: DEBUG nova.network.neutron [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: 27d12301-a049-4d1e-b171-a09a642703fb] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 631.960656] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg db5b3e18d90846198c63ee1611b0e8cc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 631.967408] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bed693387d154768972ef7abe3875834 [ 631.971989] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg db5b3e18d90846198c63ee1611b0e8cc [ 632.008329] env[62096]: DEBUG nova.policy [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '42d087a1fa2142dabae11c51b21a4f5c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '520f93d17d0c4b48beebc0ecac919190', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 632.122224] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Expecting reply to msg 4d3b41afb24041f083cd55470a2bfd2e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 632.157244] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4d3b41afb24041f083cd55470a2bfd2e [ 632.258340] env[62096]: DEBUG oslo_concurrency.lockutils [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 632.273277] env[62096]: DEBUG nova.network.neutron [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: 27d12301-a049-4d1e-b171-a09a642703fb] Successfully created port: bdb9b731-7251-4c30-a6e1-466fc3302e95 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 632.465195] env[62096]: DEBUG nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: 27d12301-a049-4d1e-b171-a09a642703fb] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 632.465195] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg ca3ff20c4d244b1fb3d62b9215d24825 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 632.504884] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ca3ff20c4d244b1fb3d62b9215d24825 [ 632.648806] env[62096]: INFO nova.scheduler.client.report [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Deleted allocations for instance 9bc49e7a-af22-4112-8609-348605599692 [ 632.654315] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Expecting reply to msg 809edaaada0e423895aae0d036038364 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 632.667110] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 809edaaada0e423895aae0d036038364 [ 632.836774] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 632.839659] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 632.840545] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 3ecad7528bf2499a8c6185ed9a33e414 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 632.858481] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ecad7528bf2499a8c6185ed9a33e414 [ 632.960437] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30c1d376-80e3-4c6c-beb9-afdfaf8f46e7 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.970491] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg 40232dc382c24d72bf462071a09bbe2f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 632.973418] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a08cbb9-e733-4cd2-a64e-7e5d6a7dd6a8 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.015677] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 40232dc382c24d72bf462071a09bbe2f [ 633.016991] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b25520b-6c01-403b-975d-0da5862aaec3 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.024920] env[62096]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45b7c0df-f93a-4513-88b9-37d51cad9d69 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.041171] env[62096]: DEBUG nova.compute.provider_tree [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 633.041821] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg b1de4a3e26134ca2a6aa9c1bfe250aac in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 633.050495] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b1de4a3e26134ca2a6aa9c1bfe250aac [ 633.156761] env[62096]: DEBUG oslo_concurrency.lockutils [None req-dbe77661-ff6b-4cc0-8cbb-e6cfd25c54ab tempest-ServerDiagnosticsTest-1671101402 tempest-ServerDiagnosticsTest-1671101402-project-member] Lock "9bc49e7a-af22-4112-8609-348605599692" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 75.048s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 633.163199] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Expecting reply to msg 0e69158f63e443a6a80c2199f604ce49 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 633.176656] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0e69158f63e443a6a80c2199f604ce49 [ 633.277447] env[62096]: DEBUG nova.compute.manager [req-776d6878-9b52-4ee8-976b-21059e9f22ae req-9c87d526-d92f-45b7-ae86-b39446ceaf81 service nova] [instance: 27d12301-a049-4d1e-b171-a09a642703fb] Received event network-changed-bdb9b731-7251-4c30-a6e1-466fc3302e95 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 633.277689] env[62096]: DEBUG nova.compute.manager [req-776d6878-9b52-4ee8-976b-21059e9f22ae req-9c87d526-d92f-45b7-ae86-b39446ceaf81 service nova] [instance: 27d12301-a049-4d1e-b171-a09a642703fb] Refreshing instance network info cache due to event network-changed-bdb9b731-7251-4c30-a6e1-466fc3302e95. 
{{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 633.277923] env[62096]: DEBUG oslo_concurrency.lockutils [req-776d6878-9b52-4ee8-976b-21059e9f22ae req-9c87d526-d92f-45b7-ae86-b39446ceaf81 service nova] Acquiring lock "refresh_cache-27d12301-a049-4d1e-b171-a09a642703fb" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 633.278081] env[62096]: DEBUG oslo_concurrency.lockutils [req-776d6878-9b52-4ee8-976b-21059e9f22ae req-9c87d526-d92f-45b7-ae86-b39446ceaf81 service nova] Acquired lock "refresh_cache-27d12301-a049-4d1e-b171-a09a642703fb" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 633.278335] env[62096]: DEBUG nova.network.neutron [req-776d6878-9b52-4ee8-976b-21059e9f22ae req-9c87d526-d92f-45b7-ae86-b39446ceaf81 service nova] [instance: 27d12301-a049-4d1e-b171-a09a642703fb] Refreshing network info cache for port bdb9b731-7251-4c30-a6e1-466fc3302e95 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 633.278902] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-776d6878-9b52-4ee8-976b-21059e9f22ae req-9c87d526-d92f-45b7-ae86-b39446ceaf81 service nova] Expecting reply to msg b6d79e46662b485e881268427052e32a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 633.286392] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b6d79e46662b485e881268427052e32a [ 633.345656] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 633.345814] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Starting heal instance info cache {{(pid=62096) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9926}} [ 633.345915] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Rebuilding the list of instances to heal {{(pid=62096) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 633.346487] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg a2bc6732725d4d28a29da8ffa2c97f66 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 633.361446] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a2bc6732725d4d28a29da8ffa2c97f66 [ 633.363022] env[62096]: ERROR nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port bdb9b731-7251-4c30-a6e1-466fc3302e95, please check neutron logs for more information. 
[ 633.363022] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 633.363022] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 633.363022] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 633.363022] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 633.363022] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 633.363022] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 633.363022] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 633.363022] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 633.363022] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 633.363022] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 633.363022] env[62096]: ERROR nova.compute.manager raise self.value [ 633.363022] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 633.363022] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 633.363022] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 633.363022] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 633.363420] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 633.363420] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 633.363420] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port bdb9b731-7251-4c30-a6e1-466fc3302e95, please check neutron logs for more information. 
[ 633.363420] env[62096]: ERROR nova.compute.manager [ 633.363420] env[62096]: Traceback (most recent call last): [ 633.363420] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 633.363420] env[62096]: listener.cb(fileno) [ 633.363420] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 633.363420] env[62096]: result = function(*args, **kwargs) [ 633.363420] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 633.363420] env[62096]: return func(*args, **kwargs) [ 633.363420] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 633.363420] env[62096]: raise e [ 633.363420] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 633.363420] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 633.363420] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 633.363420] env[62096]: created_port_ids = self._update_ports_for_instance( [ 633.363420] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 633.363420] env[62096]: with excutils.save_and_reraise_exception(): [ 633.363420] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 633.363420] env[62096]: self.force_reraise() [ 633.363420] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 633.363420] env[62096]: raise self.value [ 633.363420] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 633.363420] env[62096]: updated_port = self._update_port( [ 633.363420] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 633.363420] env[62096]: _ensure_no_port_binding_failure(port) [ 633.363420] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 633.363420] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 633.364512] env[62096]: nova.exception.PortBindingFailed: Binding failed for port bdb9b731-7251-4c30-a6e1-466fc3302e95, please check neutron logs for more information. [ 633.364512] env[62096]: Removing descriptor: 14 [ 633.473455] env[62096]: DEBUG nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: 27d12301-a049-4d1e-b171-a09a642703fb] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 633.497874] env[62096]: DEBUG nova.virt.hardware [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 633.497949] env[62096]: DEBUG nova.virt.hardware [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 633.498111] env[62096]: DEBUG nova.virt.hardware [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 633.498658] env[62096]: DEBUG nova.virt.hardware [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 633.498658] env[62096]: DEBUG nova.virt.hardware [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 633.498658] env[62096]: DEBUG nova.virt.hardware [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 633.498797] env[62096]: DEBUG nova.virt.hardware [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 633.499064] env[62096]: DEBUG nova.virt.hardware [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 633.499064] env[62096]: DEBUG nova.virt.hardware [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 633.499570] env[62096]: DEBUG nova.virt.hardware [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 633.499570] env[62096]: DEBUG nova.virt.hardware [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 633.500888] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1d2d97d-bd36-496c-b70c-681b48429c8a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.508226] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7452d91e-dae4-49a9-89b9-5dad18e96158 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.522280] env[62096]: ERROR nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: 27d12301-a049-4d1e-b171-a09a642703fb] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port bdb9b731-7251-4c30-a6e1-466fc3302e95, please check neutron logs for more information. 
[ 633.522280] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] Traceback (most recent call last): [ 633.522280] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 633.522280] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] yield resources [ 633.522280] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 633.522280] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] self.driver.spawn(context, instance, image_meta, [ 633.522280] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 633.522280] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 633.522280] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 633.522280] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] vm_ref = self.build_virtual_machine(instance, [ 633.522280] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 633.522657] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] vif_infos = vmwarevif.get_vif_info(self._session, [ 633.522657] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 633.522657] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] for vif in network_info: [ 633.522657] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 633.522657] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] return self._sync_wrapper(fn, *args, **kwargs) [ 633.522657] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 633.522657] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] self.wait() [ 633.522657] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 633.522657] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] self[:] = self._gt.wait() [ 633.522657] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 633.522657] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] return self._exit_event.wait() [ 633.522657] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 633.522657] env[62096]: ERROR 
nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] current.throw(*self._exc) [ 633.523193] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 633.523193] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] result = function(*args, **kwargs) [ 633.523193] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 633.523193] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] return func(*args, **kwargs) [ 633.523193] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 633.523193] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] raise e [ 633.523193] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 633.523193] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] nwinfo = self.network_api.allocate_for_instance( [ 633.523193] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 633.523193] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] created_port_ids = self._update_ports_for_instance( [ 633.523193] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 633.523193] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] with excutils.save_and_reraise_exception(): [ 633.523193] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 633.524380] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] self.force_reraise() [ 633.524380] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 633.524380] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] raise self.value [ 633.524380] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 633.524380] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] updated_port = self._update_port( [ 633.524380] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 633.524380] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] _ensure_no_port_binding_failure(port) [ 633.524380] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
633.524380] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] raise exception.PortBindingFailed(port_id=port['id']) [ 633.524380] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] nova.exception.PortBindingFailed: Binding failed for port bdb9b731-7251-4c30-a6e1-466fc3302e95, please check neutron logs for more information. [ 633.524380] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] [ 633.524380] env[62096]: INFO nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: 27d12301-a049-4d1e-b171-a09a642703fb] Terminating instance [ 633.525765] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Acquiring lock "refresh_cache-27d12301-a049-4d1e-b171-a09a642703fb" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 633.544244] env[62096]: DEBUG nova.scheduler.client.report [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 633.546947] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg 2eb7140bc2004569bafbcaa354d7b639 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 633.561798] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2eb7140bc2004569bafbcaa354d7b639 [ 633.665077] env[62096]: DEBUG nova.compute.manager [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] [instance: 62b778aa-71c7-480b-8148-017773246caf] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 633.666813] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Expecting reply to msg 4328fbbc88274239a9a0a7dbf56c11b8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 633.705742] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4328fbbc88274239a9a0a7dbf56c11b8 [ 633.800691] env[62096]: DEBUG nova.network.neutron [req-776d6878-9b52-4ee8-976b-21059e9f22ae req-9c87d526-d92f-45b7-ae86-b39446ceaf81 service nova] [instance: 27d12301-a049-4d1e-b171-a09a642703fb] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 633.850519] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] Skipping network cache update for instance because it is Building. {{(pid=62096) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 633.850706] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] Skipping network cache update for instance because it is Building. {{(pid=62096) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 633.850911] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] Skipping network cache update for instance because it is Building. {{(pid=62096) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 633.851124] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] [instance: 27d12301-a049-4d1e-b171-a09a642703fb] Skipping network cache update for instance because it is Building. {{(pid=62096) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 633.851193] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] Skipping network cache update for instance because it is Building. {{(pid=62096) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 633.907156] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Acquiring lock "refresh_cache-12a4b44d-b6aa-45e2-bba9-d73f41fa4b61" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 633.907458] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Acquired lock "refresh_cache-12a4b44d-b6aa-45e2-bba9-d73f41fa4b61" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 633.907718] env[62096]: DEBUG nova.network.neutron [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Forcefully refreshing network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 633.907984] env[62096]: DEBUG nova.objects.instance [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Lazy-loading 'info_cache' on Instance uuid 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61 {{(pid=62096) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 633.908792] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg f0477f80e4ed4522acdf132bfda3c946 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 633.934651] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f0477f80e4ed4522acdf132bfda3c946 [ 633.989402] env[62096]: DEBUG nova.network.neutron [req-776d6878-9b52-4ee8-976b-21059e9f22ae req-9c87d526-d92f-45b7-ae86-b39446ceaf81 service nova] [instance: 27d12301-a049-4d1e-b171-a09a642703fb] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 633.989633] env[62096]: INFO 
oslo_messaging._drivers.amqpdriver [req-776d6878-9b52-4ee8-976b-21059e9f22ae req-9c87d526-d92f-45b7-ae86-b39446ceaf81 service nova] Expecting reply to msg 7ad00b920dac4535be4c3a12f35db8a3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 633.998244] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ad00b920dac4535be4c3a12f35db8a3 [ 634.050535] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.605s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 634.051058] env[62096]: DEBUG nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] Start building networks asynchronously for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 634.052729] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg eacb9476381946e99cf0be4b02a02e52 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 634.053730] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.571s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 634.059913] env[62096]: INFO nova.compute.claims [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: bbed3eed-f511-4b9e-9632-74841df01592] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 634.059913] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg a25c5504234a4a33ad2703d0e4e5d199 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 634.091428] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eacb9476381946e99cf0be4b02a02e52 [ 634.095858] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a25c5504234a4a33ad2703d0e4e5d199 [ 634.186332] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 634.413506] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg e15a0a37db7a4a29af75d807ae779756 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 634.420308] env[62096]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e15a0a37db7a4a29af75d807ae779756 [ 634.491807] env[62096]: DEBUG oslo_concurrency.lockutils [req-776d6878-9b52-4ee8-976b-21059e9f22ae req-9c87d526-d92f-45b7-ae86-b39446ceaf81 service nova] Releasing lock "refresh_cache-27d12301-a049-4d1e-b171-a09a642703fb" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 634.492383] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Acquired lock "refresh_cache-27d12301-a049-4d1e-b171-a09a642703fb" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.492605] env[62096]: DEBUG nova.network.neutron [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: 27d12301-a049-4d1e-b171-a09a642703fb] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 634.493081] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg 8319f086cadc44ce89c9d35566c65792 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 634.500011] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8319f086cadc44ce89c9d35566c65792 [ 634.561073] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg b482c3e6933e49029f67ed08c701b850 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 634.562804] env[62096]: DEBUG nova.compute.utils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 634.563292] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg 5e67ee98a18e49b6b4e3b79651248465 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 634.564247] env[62096]: DEBUG nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 634.564413] env[62096]: DEBUG nova.network.neutron [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 634.568808] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b482c3e6933e49029f67ed08c701b850 [ 634.572025] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5e67ee98a18e49b6b4e3b79651248465 [ 634.638873] env[62096]: DEBUG nova.policy [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '42d087a1fa2142dabae11c51b21a4f5c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '520f93d17d0c4b48beebc0ecac919190', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 634.939847] env[62096]: DEBUG nova.network.neutron [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 634.940220] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 788734578e024ce0a80bad5e698e5009 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 634.948307] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 788734578e024ce0a80bad5e698e5009 [ 635.020254] env[62096]: DEBUG nova.network.neutron [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: 27d12301-a049-4d1e-b171-a09a642703fb] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 635.069268] env[62096]: DEBUG nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 635.070896] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg a14fad5c7fba48beb8e42a801b822d1f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 635.108063] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a14fad5c7fba48beb8e42a801b822d1f [ 635.207338] env[62096]: DEBUG nova.network.neutron [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] Successfully created port: 285d5c29-d148-4abd-8630-12f7a9c6abf5 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 635.265613] env[62096]: DEBUG nova.network.neutron [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: 27d12301-a049-4d1e-b171-a09a642703fb] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 635.266181] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg 5455f50a3d7e4584a79143fdd17c801c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 635.276437] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5455f50a3d7e4584a79143fdd17c801c [ 635.452327] env[62096]: DEBUG nova.compute.manager [req-9b0ed47f-f207-4f12-b23e-be302c6d663f req-9e1a2dc2-668b-48cc-86a2-ffb355af990e service nova] [instance: 27d12301-a049-4d1e-b171-a09a642703fb] Received event network-vif-deleted-bdb9b731-7251-4c30-a6e1-466fc3302e95 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 635.522555] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d2a5321-4db3-4a01-ab3d-f992143a4394 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.532232] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1a5807c-7c73-4172-8825-dc3e1c426f45 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.561991] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-475b9c4c-60b5-4224-b438-aa3f75a28e20 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.569368] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3db9fdd6-9762-4294-b25f-b725e8a0aa5c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.575461] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg 7c003633a6164b59b234859a85997f3e in queue 
reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 635.587034] env[62096]: DEBUG nova.compute.provider_tree [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 635.587517] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg 0ead1842c807411f866b0eaf5dd85c6a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 635.588869] env[62096]: DEBUG nova.network.neutron [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 635.589248] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg bcbf85baff0b49929c5e8969a8d4d288 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 635.594124] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0ead1842c807411f866b0eaf5dd85c6a [ 635.602795] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bcbf85baff0b49929c5e8969a8d4d288 [ 635.616829] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c003633a6164b59b234859a85997f3e [ 635.770740] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Releasing lock "refresh_cache-27d12301-a049-4d1e-b171-a09a642703fb" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 635.771070] env[62096]: DEBUG nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: 27d12301-a049-4d1e-b171-a09a642703fb] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 635.771278] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: 27d12301-a049-4d1e-b171-a09a642703fb] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 635.771583] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0c65abd6-6f4f-4fa5-82d7-24fb3aeb84a1 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.780771] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7a0acee-1d4a-40a4-98b2-5d2250202d75 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.802563] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: 27d12301-a049-4d1e-b171-a09a642703fb] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 27d12301-a049-4d1e-b171-a09a642703fb could not be found. [ 635.802794] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: 27d12301-a049-4d1e-b171-a09a642703fb] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 635.802979] env[62096]: INFO nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: 27d12301-a049-4d1e-b171-a09a642703fb] Took 0.03 seconds to destroy the instance on the hypervisor. [ 635.803224] env[62096]: DEBUG oslo.service.loopingcall [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 635.803513] env[62096]: DEBUG nova.compute.manager [-] [instance: 27d12301-a049-4d1e-b171-a09a642703fb] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 635.803557] env[62096]: DEBUG nova.network.neutron [-] [instance: 27d12301-a049-4d1e-b171-a09a642703fb] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 635.822251] env[62096]: DEBUG nova.network.neutron [-] [instance: 27d12301-a049-4d1e-b171-a09a642703fb] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 635.823389] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg b7041ab27c9e4bfe8dbe08f8cec0b42f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 635.834284] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b7041ab27c9e4bfe8dbe08f8cec0b42f [ 636.081896] env[62096]: DEBUG nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] Start spawning the instance on the hypervisor. {{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 636.093841] env[62096]: DEBUG nova.scheduler.client.report [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 636.093841] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg 301a90be74b94dc083c831450224861c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 636.094804] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Releasing lock "refresh_cache-12a4b44d-b6aa-45e2-bba9-d73f41fa4b61" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 636.094804] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Updated the network info_cache for instance {{(pid=62096) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 636.096971] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 636.096971] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 636.096971] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 636.096971] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62096) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 636.096971] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 636.096971] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 636.097183] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62096) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10545}} [ 636.097183] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 636.097183] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 8ee7217a42054f20838c35b9a329eb43 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 636.107574] env[62096]: DEBUG nova.virt.hardware [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 636.107803] env[62096]: DEBUG nova.virt.hardware [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 636.107968] env[62096]: DEBUG nova.virt.hardware [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 636.108194] env[62096]: DEBUG nova.virt.hardware [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 636.108397] env[62096]: DEBUG nova.virt.hardware [None 
req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 636.108550] env[62096]: DEBUG nova.virt.hardware [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 636.108752] env[62096]: DEBUG nova.virt.hardware [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 636.108906] env[62096]: DEBUG nova.virt.hardware [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 636.109063] env[62096]: DEBUG nova.virt.hardware [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 636.109224] env[62096]: DEBUG nova.virt.hardware [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 636.109390] env[62096]: DEBUG nova.virt.hardware [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 636.110298] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcda7c9b-a6c2-4f9d-9a28-596e5498a59c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.113856] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8ee7217a42054f20838c35b9a329eb43 [ 636.115312] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 301a90be74b94dc083c831450224861c [ 636.120445] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c82adbc7-b4ce-419c-a9ef-6c9955d2643b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.312446] env[62096]: ERROR nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed 
for port 285d5c29-d148-4abd-8630-12f7a9c6abf5, please check neutron logs for more information. [ 636.312446] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 636.312446] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 636.312446] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 636.312446] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 636.312446] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 636.312446] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 636.312446] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 636.312446] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 636.312446] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 636.312446] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 636.312446] env[62096]: ERROR nova.compute.manager raise self.value [ 636.312446] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 636.312446] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 636.312446] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 636.312446] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 636.313192] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 636.313192] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 636.313192] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 285d5c29-d148-4abd-8630-12f7a9c6abf5, please check neutron logs for more information. 
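The traceback above bottoms out in _ensure_no_port_binding_failure (nova/network/neutron.py:294), which converts a failed Neutron port binding into PortBindingFailed. A minimal sketch of that kind of check follows; it is not Nova's exact code, the 'binding_failed' value for binding:vif_type is an assumption based on the Neutron port-binding extension, and only the helper and exception names come from the traceback.

    # Rough sketch, not Nova's exact code: reject a port whose binding
    # Neutron could not complete. 'binding_failed' as the sentinel value of
    # 'binding:vif_type' is an assumption.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port: dict) -> None:
        # Neutron reports the bound VIF driver via 'binding:vif_type'; a
        # failed binding leaves the port unusable, so fail the build early.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    try:
        ensure_no_port_binding_failure(
            {'id': '285d5c29-d148-4abd-8630-12f7a9c6abf5',
             'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)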
[ 636.313192] env[62096]: ERROR nova.compute.manager [ 636.313192] env[62096]: Traceback (most recent call last): [ 636.313192] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 636.313192] env[62096]: listener.cb(fileno) [ 636.313192] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 636.313192] env[62096]: result = function(*args, **kwargs) [ 636.313192] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 636.313192] env[62096]: return func(*args, **kwargs) [ 636.313192] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 636.313192] env[62096]: raise e [ 636.313192] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 636.313192] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 636.313192] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 636.313192] env[62096]: created_port_ids = self._update_ports_for_instance( [ 636.313192] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 636.313192] env[62096]: with excutils.save_and_reraise_exception(): [ 636.313192] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 636.313192] env[62096]: self.force_reraise() [ 636.313192] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 636.313192] env[62096]: raise self.value [ 636.313192] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 636.313192] env[62096]: updated_port = self._update_port( [ 636.313192] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 636.313192] env[62096]: _ensure_no_port_binding_failure(port) [ 636.313192] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 636.313192] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 636.314015] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 285d5c29-d148-4abd-8630-12f7a9c6abf5, please check neutron logs for more information. [ 636.314015] env[62096]: Removing descriptor: 14 [ 636.314015] env[62096]: ERROR nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 285d5c29-d148-4abd-8630-12f7a9c6abf5, please check neutron logs for more information. 
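This second, bare traceback shows why the failure surfaces so late: _allocate_network_async runs on an eventlet greenthread, and the exception is only re-raised when the spawn path first iterates the network_info wrapper (model.py _sync_wrapper, then wait, then GreenThread.wait). A rough sketch of that deferred-wait pattern, assuming eventlet is available; apart from the call chain named in the traceback, the names below are illustrative.

    # Rough sketch of the deferred network allocation visible in the
    # traceback: allocation runs on an eventlet greenthread, and any error
    # is re-raised only when the result is first consumed (as when
    # get_vif_info() iterates network_info). Requires eventlet.
    import eventlet

    class NetworkInfoAsync(list):
        """List-like wrapper that resolves its greenthread on first use."""
        def __init__(self, greenthread):
            super().__init__()
            self._gt = greenthread
            self._resolved = False

        def _wait(self):
            if not self._resolved:
                # GreenThread.wait() returns the result or re-raises the
                # exception raised inside the spawned function.
                self[:] = self._gt.wait()
                self._resolved = True

        def __iter__(self):
            self._wait()
            return super().__iter__()

    def allocate_networks(instance_id):
        # Stand-in for the Neutron call; fails the way the log above does.
        raise RuntimeError(f"Binding failed while allocating for {instance_id}")

    nw_info = NetworkInfoAsync(eventlet.spawn(allocate_networks, 'd9562762'))
    try:
        for vif in nw_info:      # the error surfaces here, not at spawn time
            print(vif)
    except RuntimeError as exc:
        print('spawn failed late:', exc)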
[ 636.314015] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] Traceback (most recent call last): [ 636.314015] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 636.314015] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] yield resources [ 636.314015] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 636.314015] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] self.driver.spawn(context, instance, image_meta, [ 636.314015] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 636.314015] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 636.314015] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 636.314015] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] vm_ref = self.build_virtual_machine(instance, [ 636.314361] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 636.314361] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] vif_infos = vmwarevif.get_vif_info(self._session, [ 636.314361] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 636.314361] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] for vif in network_info: [ 636.314361] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 636.314361] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] return self._sync_wrapper(fn, *args, **kwargs) [ 636.314361] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 636.314361] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] self.wait() [ 636.314361] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 636.314361] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] self[:] = self._gt.wait() [ 636.314361] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 636.314361] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] return self._exit_event.wait() [ 636.314361] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 636.315061] env[62096]: ERROR 
nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] result = hub.switch() [ 636.315061] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 636.315061] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] return self.greenlet.switch() [ 636.315061] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 636.315061] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] result = function(*args, **kwargs) [ 636.315061] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 636.315061] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] return func(*args, **kwargs) [ 636.315061] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 636.315061] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] raise e [ 636.315061] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 636.315061] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] nwinfo = self.network_api.allocate_for_instance( [ 636.315061] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 636.315061] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] created_port_ids = self._update_ports_for_instance( [ 636.315647] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 636.315647] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] with excutils.save_and_reraise_exception(): [ 636.315647] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 636.315647] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] self.force_reraise() [ 636.315647] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 636.315647] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] raise self.value [ 636.315647] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 636.315647] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] updated_port = self._update_port( [ 636.315647] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 636.315647] 
env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] _ensure_no_port_binding_failure(port) [ 636.315647] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 636.315647] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] raise exception.PortBindingFailed(port_id=port['id']) [ 636.315987] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] nova.exception.PortBindingFailed: Binding failed for port 285d5c29-d148-4abd-8630-12f7a9c6abf5, please check neutron logs for more information. [ 636.315987] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] [ 636.315987] env[62096]: INFO nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] Terminating instance [ 636.315987] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Acquiring lock "refresh_cache-d9562762-52de-4a0c-b6a2-2aeaa20e47a0" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 636.316416] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Acquired lock "refresh_cache-d9562762-52de-4a0c-b6a2-2aeaa20e47a0" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 636.316416] env[62096]: DEBUG nova.network.neutron [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 636.316711] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg 005a1da997a543939b8eba4f26b8371c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 636.323972] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 005a1da997a543939b8eba4f26b8371c [ 636.325478] env[62096]: DEBUG nova.network.neutron [-] [instance: 27d12301-a049-4d1e-b171-a09a642703fb] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.325848] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg d5b96e4a39e24e8385c6140b24252496 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 636.334455] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d5b96e4a39e24e8385c6140b24252496 [ 636.597284] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.543s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 636.597420] env[62096]: DEBUG nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: bbed3eed-f511-4b9e-9632-74841df01592] Start building networks asynchronously for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 636.602813] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg 5e829ac625984e8e803ce209697a30ed in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 636.602813] env[62096]: DEBUG oslo_concurrency.lockutils [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.932s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 636.602813] env[62096]: INFO nova.compute.claims [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] [instance: 06b9105a-8dcb-4bff-bba2-05e179036f24] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 636.603891] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Expecting reply to msg 5f157e6600134a0bb62e961d12f7d6df in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 636.606058] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 636.641989] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5e829ac625984e8e803ce209697a30ed [ 636.658036] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5f157e6600134a0bb62e961d12f7d6df [ 636.828176] env[62096]: INFO nova.compute.manager [-] [instance: 27d12301-a049-4d1e-b171-a09a642703fb] Took 1.02 seconds to deallocate network for instance. 
[ 636.830592] env[62096]: DEBUG nova.compute.claims [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: 27d12301-a049-4d1e-b171-a09a642703fb] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 636.830775] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 636.845887] env[62096]: DEBUG nova.network.neutron [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 636.951041] env[62096]: DEBUG nova.network.neutron [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.951573] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg 17110e92acd44e66b00c9b3864cee4a5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 636.965236] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 17110e92acd44e66b00c9b3864cee4a5 [ 637.103083] env[62096]: DEBUG nova.compute.utils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 637.103723] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg c1f9d7f599b7454b9fc1958611e52ae6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 637.106911] env[62096]: DEBUG nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: bbed3eed-f511-4b9e-9632-74841df01592] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 637.107089] env[62096]: DEBUG nova.network.neutron [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: bbed3eed-f511-4b9e-9632-74841df01592] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 637.111091] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Expecting reply to msg 3649c6324cd54cd5b23768b5e7b14d90 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 637.120067] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c1f9d7f599b7454b9fc1958611e52ae6 [ 637.123480] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3649c6324cd54cd5b23768b5e7b14d90 [ 637.168994] env[62096]: DEBUG nova.policy [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '42d087a1fa2142dabae11c51b21a4f5c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '520f93d17d0c4b48beebc0ecac919190', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 637.454277] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Releasing lock "refresh_cache-d9562762-52de-4a0c-b6a2-2aeaa20e47a0" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 637.454712] env[62096]: DEBUG nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 637.454917] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 637.455245] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b350b437-7a6e-4f4b-876c-b02d2c4f0016 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.464120] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bc63a9c-5a4d-4922-a229-944cf865d23d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.486210] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d9562762-52de-4a0c-b6a2-2aeaa20e47a0 could not be found. [ 637.486455] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 637.486641] env[62096]: INFO nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] Took 0.03 seconds to destroy the instance on the hypervisor. [ 637.486887] env[62096]: DEBUG oslo.service.loopingcall [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 637.487117] env[62096]: DEBUG nova.compute.manager [-] [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 637.487211] env[62096]: DEBUG nova.network.neutron [-] [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 637.506973] env[62096]: DEBUG nova.network.neutron [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: bbed3eed-f511-4b9e-9632-74841df01592] Successfully created port: 25b221b6-400a-463b-902b-13eb0822967e {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 637.572626] env[62096]: DEBUG nova.network.neutron [-] [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 637.573149] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 0ef6a2fed8bf4b6d9f9ae41e63b499cd in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 637.580963] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0ef6a2fed8bf4b6d9f9ae41e63b499cd [ 637.607377] env[62096]: DEBUG nova.compute.manager [req-385188ed-3af6-4b29-b90e-d5c8b80c0fc3 req-d71b59d0-34e4-4c65-ae7f-cd9155f26d7d service nova] [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] Received event network-changed-285d5c29-d148-4abd-8630-12f7a9c6abf5 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 637.608983] env[62096]: DEBUG nova.compute.manager [req-385188ed-3af6-4b29-b90e-d5c8b80c0fc3 req-d71b59d0-34e4-4c65-ae7f-cd9155f26d7d service nova] [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] Refreshing instance network info cache due to event network-changed-285d5c29-d148-4abd-8630-12f7a9c6abf5. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 637.608983] env[62096]: DEBUG oslo_concurrency.lockutils [req-385188ed-3af6-4b29-b90e-d5c8b80c0fc3 req-d71b59d0-34e4-4c65-ae7f-cd9155f26d7d service nova] Acquiring lock "refresh_cache-d9562762-52de-4a0c-b6a2-2aeaa20e47a0" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 637.608983] env[62096]: DEBUG oslo_concurrency.lockutils [req-385188ed-3af6-4b29-b90e-d5c8b80c0fc3 req-d71b59d0-34e4-4c65-ae7f-cd9155f26d7d service nova] Acquired lock "refresh_cache-d9562762-52de-4a0c-b6a2-2aeaa20e47a0" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 637.608983] env[62096]: DEBUG nova.network.neutron [req-385188ed-3af6-4b29-b90e-d5c8b80c0fc3 req-d71b59d0-34e4-4c65-ae7f-cd9155f26d7d service nova] [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] Refreshing network info cache for port 285d5c29-d148-4abd-8630-12f7a9c6abf5 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 637.608983] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-385188ed-3af6-4b29-b90e-d5c8b80c0fc3 req-d71b59d0-34e4-4c65-ae7f-cd9155f26d7d service nova] Expecting reply to msg 4ecd93c8e9bb415eac6bd8acdfb84ad8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 637.611557] env[62096]: DEBUG nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: bbed3eed-f511-4b9e-9632-74841df01592] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 637.611557] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg 38d2c465dbc04451ac9aafa213c322bd in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 637.621450] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4ecd93c8e9bb415eac6bd8acdfb84ad8 [ 637.653384] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 38d2c465dbc04451ac9aafa213c322bd [ 638.075896] env[62096]: DEBUG nova.network.neutron [-] [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 638.076558] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg ab8da489a1e54a2abef98ff6f084f6cc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 638.086317] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ab8da489a1e54a2abef98ff6f084f6cc [ 638.106825] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b78071c-8e0a-499b-bbd7-c2f8657f6886 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.117702] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg 41e2b425d58f448197ec008c0aa7f350 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 638.135820] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c4365ee-b714-41bc-9b5e-526a87bb7644 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.171198] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 41e2b425d58f448197ec008c0aa7f350 [ 638.172376] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-671528ea-fc14-4958-8eb8-8e277e58edb5 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.180855] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d6581f2-10b7-4378-9e03-b0b2d35ac19e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.185608] env[62096]: DEBUG nova.network.neutron [req-385188ed-3af6-4b29-b90e-d5c8b80c0fc3 req-d71b59d0-34e4-4c65-ae7f-cd9155f26d7d service nova] [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 638.201338] env[62096]: DEBUG nova.compute.provider_tree [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 638.202223] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Expecting reply to msg 974d62a5275a43b6ab5c6acebc2a4e9c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 638.210672] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 974d62a5275a43b6ab5c6acebc2a4e9c [ 638.351946] env[62096]: DEBUG nova.network.neutron [req-385188ed-3af6-4b29-b90e-d5c8b80c0fc3 req-d71b59d0-34e4-4c65-ae7f-cd9155f26d7d service nova] [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 638.353049] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-385188ed-3af6-4b29-b90e-d5c8b80c0fc3 req-d71b59d0-34e4-4c65-ae7f-cd9155f26d7d service nova] Expecting reply to msg 2e953a99412f43f9ad3e69ae39a2d53a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 638.361207] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2e953a99412f43f9ad3e69ae39a2d53a [ 638.527941] env[62096]: ERROR nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 25b221b6-400a-463b-902b-13eb0822967e, please check neutron logs for more information. 
[ 638.527941] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 638.527941] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 638.527941] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 638.527941] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 638.527941] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 638.527941] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 638.527941] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 638.527941] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 638.527941] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 638.527941] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 638.527941] env[62096]: ERROR nova.compute.manager raise self.value [ 638.527941] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 638.527941] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 638.527941] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 638.527941] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 638.528662] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 638.528662] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 638.528662] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 25b221b6-400a-463b-902b-13eb0822967e, please check neutron logs for more information. 
[ 638.528662] env[62096]: ERROR nova.compute.manager [ 638.528662] env[62096]: Traceback (most recent call last): [ 638.528662] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 638.528662] env[62096]: listener.cb(fileno) [ 638.528662] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 638.528662] env[62096]: result = function(*args, **kwargs) [ 638.528662] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 638.528662] env[62096]: return func(*args, **kwargs) [ 638.528662] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 638.528662] env[62096]: raise e [ 638.528662] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 638.528662] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 638.528662] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 638.528662] env[62096]: created_port_ids = self._update_ports_for_instance( [ 638.528662] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 638.528662] env[62096]: with excutils.save_and_reraise_exception(): [ 638.528662] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 638.528662] env[62096]: self.force_reraise() [ 638.528662] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 638.528662] env[62096]: raise self.value [ 638.528662] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 638.528662] env[62096]: updated_port = self._update_port( [ 638.528662] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 638.528662] env[62096]: _ensure_no_port_binding_failure(port) [ 638.528662] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 638.528662] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 638.529490] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 25b221b6-400a-463b-902b-13eb0822967e, please check neutron logs for more information. [ 638.529490] env[62096]: Removing descriptor: 14 [ 638.578830] env[62096]: INFO nova.compute.manager [-] [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] Took 1.09 seconds to deallocate network for instance. 
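The two tracebacks above both bottom out in nova/network/neutron.py:_ensure_no_port_binding_failure, which is the point where a failed Neutron port binding is turned into the PortBindingFailed that aborts the build. A minimal sketch of that check is shown below; it assumes the port body is a plain dict and uses a simplified stand-in exception class rather than the real nova.exception.PortBindingFailed, so the names and message format here are illustrative only, reconstructed from the paths and messages visible in the traceback.

    # Illustrative sketch of the binding-failure check seen in the traceback above.
    # Assumption: Neutron reports a failed binding via binding:vif_type == 'binding_failed';
    # this exception class is a simplified stand-in for nova.exception.PortBindingFailed.

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs for "
                "more information." % port_id)
            self.port_id = port_id


    def ensure_no_port_binding_failure(port):
        """Raise if Neutron reports that binding this port failed."""
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])


    # Example: the port from the log, with a binding that failed on the chosen host.
    port = {'id': '25b221b6-400a-463b-902b-13eb0822967e',
            'binding:vif_type': 'binding_failed'}
    ensure_no_port_binding_failure(port)  # raises PortBindingFailed, as logged above

When this exception propagates out of _allocate_network_async, the compute manager aborts the resource claim and tears the instance back down, which is the destroy / deallocate-network sequence that follows for instance bbed3eed-f511-4b9e-9632-74841df01592 in the records below.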
[ 638.581174] env[62096]: DEBUG nova.compute.claims [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 638.581580] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 638.636283] env[62096]: DEBUG nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: bbed3eed-f511-4b9e-9632-74841df01592] Start spawning the instance on the hypervisor. {{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 638.663284] env[62096]: DEBUG nova.virt.hardware [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 638.663517] env[62096]: DEBUG nova.virt.hardware [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 638.663666] env[62096]: DEBUG nova.virt.hardware [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 638.663840] env[62096]: DEBUG nova.virt.hardware [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 638.664014] env[62096]: DEBUG nova.virt.hardware [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 638.664198] env[62096]: DEBUG nova.virt.hardware [None 
req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 638.664560] env[62096]: DEBUG nova.virt.hardware [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 638.664628] env[62096]: DEBUG nova.virt.hardware [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 638.664828] env[62096]: DEBUG nova.virt.hardware [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 638.665036] env[62096]: DEBUG nova.virt.hardware [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 638.665239] env[62096]: DEBUG nova.virt.hardware [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 638.666176] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c463859b-1763-409e-8938-bf74fedb6d36 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.674604] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71cc3c96-c4e1-4f65-8f05-0331bb42f393 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.688524] env[62096]: ERROR nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: bbed3eed-f511-4b9e-9632-74841df01592] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 25b221b6-400a-463b-902b-13eb0822967e, please check neutron logs for more information. 
[ 638.688524] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] Traceback (most recent call last): [ 638.688524] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 638.688524] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] yield resources [ 638.688524] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 638.688524] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] self.driver.spawn(context, instance, image_meta, [ 638.688524] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 638.688524] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] self._vmops.spawn(context, instance, image_meta, injected_files, [ 638.688524] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 638.688524] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] vm_ref = self.build_virtual_machine(instance, [ 638.688524] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 638.688888] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] vif_infos = vmwarevif.get_vif_info(self._session, [ 638.688888] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 638.688888] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] for vif in network_info: [ 638.688888] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 638.688888] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] return self._sync_wrapper(fn, *args, **kwargs) [ 638.688888] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 638.688888] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] self.wait() [ 638.688888] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 638.688888] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] self[:] = self._gt.wait() [ 638.688888] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 638.688888] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] return self._exit_event.wait() [ 638.688888] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 638.688888] env[62096]: ERROR 
nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] current.throw(*self._exc) [ 638.689263] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 638.689263] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] result = function(*args, **kwargs) [ 638.689263] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 638.689263] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] return func(*args, **kwargs) [ 638.689263] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 638.689263] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] raise e [ 638.689263] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 638.689263] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] nwinfo = self.network_api.allocate_for_instance( [ 638.689263] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 638.689263] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] created_port_ids = self._update_ports_for_instance( [ 638.689263] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 638.689263] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] with excutils.save_and_reraise_exception(): [ 638.689263] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 638.689640] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] self.force_reraise() [ 638.689640] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 638.689640] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] raise self.value [ 638.689640] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 638.689640] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] updated_port = self._update_port( [ 638.689640] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 638.689640] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] _ensure_no_port_binding_failure(port) [ 638.689640] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
638.689640] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] raise exception.PortBindingFailed(port_id=port['id']) [ 638.689640] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] nova.exception.PortBindingFailed: Binding failed for port 25b221b6-400a-463b-902b-13eb0822967e, please check neutron logs for more information. [ 638.689640] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] [ 638.689640] env[62096]: INFO nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: bbed3eed-f511-4b9e-9632-74841df01592] Terminating instance [ 638.690818] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Acquiring lock "refresh_cache-bbed3eed-f511-4b9e-9632-74841df01592" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 638.690978] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Acquired lock "refresh_cache-bbed3eed-f511-4b9e-9632-74841df01592" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 638.691144] env[62096]: DEBUG nova.network.neutron [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: bbed3eed-f511-4b9e-9632-74841df01592] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 638.691555] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg 85655760cba742e5802ddf819d37cdfe in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 638.698464] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 85655760cba742e5802ddf819d37cdfe [ 638.705070] env[62096]: DEBUG nova.scheduler.client.report [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 638.707391] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Expecting reply to msg d6eb3e9dfb924db090bc2ac9607101e7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 638.718500] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC 
response for msg d6eb3e9dfb924db090bc2ac9607101e7 [ 638.860022] env[62096]: DEBUG oslo_concurrency.lockutils [req-385188ed-3af6-4b29-b90e-d5c8b80c0fc3 req-d71b59d0-34e4-4c65-ae7f-cd9155f26d7d service nova] Releasing lock "refresh_cache-d9562762-52de-4a0c-b6a2-2aeaa20e47a0" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 638.860022] env[62096]: DEBUG nova.compute.manager [req-385188ed-3af6-4b29-b90e-d5c8b80c0fc3 req-d71b59d0-34e4-4c65-ae7f-cd9155f26d7d service nova] [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] Received event network-vif-deleted-285d5c29-d148-4abd-8630-12f7a9c6abf5 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 639.121257] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Acquiring lock "de88f8a1-20a5-49f9-adcb-de48aeaa548a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 639.121486] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Lock "de88f8a1-20a5-49f9-adcb-de48aeaa548a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 639.209479] env[62096]: DEBUG oslo_concurrency.lockutils [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.608s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 639.210008] env[62096]: DEBUG nova.compute.manager [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] [instance: 06b9105a-8dcb-4bff-bba2-05e179036f24] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 639.211801] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Expecting reply to msg 3400636b582646688c944380ac271da6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 639.212805] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.422s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 639.214673] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 94d91d32c7d3495cbf31047224145355 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 639.216434] env[62096]: DEBUG nova.network.neutron [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: bbed3eed-f511-4b9e-9632-74841df01592] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 639.252918] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3400636b582646688c944380ac271da6 [ 639.265677] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 94d91d32c7d3495cbf31047224145355 [ 639.308212] env[62096]: DEBUG nova.network.neutron [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: bbed3eed-f511-4b9e-9632-74841df01592] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 639.308751] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg 3814a155cbe149188edd07628857f5d8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 639.317800] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3814a155cbe149188edd07628857f5d8 [ 639.588527] env[62096]: DEBUG oslo_concurrency.lockutils [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Acquiring lock "d7bd302c-1d85-45c2-9a3e-9855a6488d92" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 639.588800] env[62096]: DEBUG oslo_concurrency.lockutils [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Lock "d7bd302c-1d85-45c2-9a3e-9855a6488d92" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 639.636372] env[62096]: DEBUG nova.compute.manager [req-f2dfdeb6-4ca3-466c-9e5c-d3bc3c728fb0 req-61286e3a-8657-456c-8957-c522ba4abd46 service nova] [instance: bbed3eed-f511-4b9e-9632-74841df01592] Received event network-changed-25b221b6-400a-463b-902b-13eb0822967e {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 639.636617] env[62096]: DEBUG nova.compute.manager [req-f2dfdeb6-4ca3-466c-9e5c-d3bc3c728fb0 req-61286e3a-8657-456c-8957-c522ba4abd46 service nova] [instance: bbed3eed-f511-4b9e-9632-74841df01592] Refreshing instance network info cache due to event network-changed-25b221b6-400a-463b-902b-13eb0822967e. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 639.636740] env[62096]: DEBUG oslo_concurrency.lockutils [req-f2dfdeb6-4ca3-466c-9e5c-d3bc3c728fb0 req-61286e3a-8657-456c-8957-c522ba4abd46 service nova] Acquiring lock "refresh_cache-bbed3eed-f511-4b9e-9632-74841df01592" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 639.717605] env[62096]: DEBUG nova.compute.utils [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 639.718164] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Expecting reply to msg f35ae81f7565443a8cbdb510efd0019d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 639.719495] env[62096]: DEBUG nova.compute.manager [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] [instance: 06b9105a-8dcb-4bff-bba2-05e179036f24] Not allocating networking since 'none' was specified. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 639.729499] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f35ae81f7565443a8cbdb510efd0019d [ 639.817952] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Releasing lock "refresh_cache-bbed3eed-f511-4b9e-9632-74841df01592" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 639.818448] env[62096]: DEBUG nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: bbed3eed-f511-4b9e-9632-74841df01592] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 639.818660] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: bbed3eed-f511-4b9e-9632-74841df01592] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 639.821566] env[62096]: DEBUG oslo_concurrency.lockutils [req-f2dfdeb6-4ca3-466c-9e5c-d3bc3c728fb0 req-61286e3a-8657-456c-8957-c522ba4abd46 service nova] Acquired lock "refresh_cache-bbed3eed-f511-4b9e-9632-74841df01592" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 639.821868] env[62096]: DEBUG nova.network.neutron [req-f2dfdeb6-4ca3-466c-9e5c-d3bc3c728fb0 req-61286e3a-8657-456c-8957-c522ba4abd46 service nova] [instance: bbed3eed-f511-4b9e-9632-74841df01592] Refreshing network info cache for port 25b221b6-400a-463b-902b-13eb0822967e {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 639.822165] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-f2dfdeb6-4ca3-466c-9e5c-d3bc3c728fb0 req-61286e3a-8657-456c-8957-c522ba4abd46 service nova] Expecting reply to msg e25b678b161b47648034592a6572a6df in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 639.823234] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fff6c19a-739a-4577-86b9-257b4e05aa89 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.837410] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e25b678b161b47648034592a6572a6df [ 639.841722] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bafa80b3-ac00-415b-85cb-8ff4a6bb1b06 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.868056] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: bbed3eed-f511-4b9e-9632-74841df01592] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bbed3eed-f511-4b9e-9632-74841df01592 could not be found. [ 639.868341] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: bbed3eed-f511-4b9e-9632-74841df01592] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 639.868527] env[62096]: INFO nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: bbed3eed-f511-4b9e-9632-74841df01592] Took 0.05 seconds to destroy the instance on the hypervisor. [ 639.868766] env[62096]: DEBUG oslo.service.loopingcall [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 639.871191] env[62096]: DEBUG nova.compute.manager [-] [instance: bbed3eed-f511-4b9e-9632-74841df01592] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 639.871287] env[62096]: DEBUG nova.network.neutron [-] [instance: bbed3eed-f511-4b9e-9632-74841df01592] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 639.890461] env[62096]: DEBUG nova.network.neutron [-] [instance: bbed3eed-f511-4b9e-9632-74841df01592] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 639.890991] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 2508830aeddc4747a1e38e5f4e4fee97 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 639.899372] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2508830aeddc4747a1e38e5f4e4fee97 [ 640.165586] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4554c64-5f22-41ac-aa68-80a8ce75f340 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.173530] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a206c45-21ea-435e-a47c-080ebcc0f146 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.203625] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b857c413-5f7a-4e3a-9841-3a832b8072d3 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.210789] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-991d2c0c-4e73-4bf4-9d0e-b816adf8fedc {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.223397] env[62096]: DEBUG nova.compute.manager [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] [instance: 06b9105a-8dcb-4bff-bba2-05e179036f24] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 640.225094] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Expecting reply to msg c6d87d54048c40f3b6935801186735d0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 640.226177] env[62096]: DEBUG nova.compute.provider_tree [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 640.226596] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg e0999105caa64c6d945dc3bc89443963 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 640.236725] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e0999105caa64c6d945dc3bc89443963 [ 640.258308] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c6d87d54048c40f3b6935801186735d0 [ 640.342234] env[62096]: DEBUG nova.network.neutron [req-f2dfdeb6-4ca3-466c-9e5c-d3bc3c728fb0 req-61286e3a-8657-456c-8957-c522ba4abd46 service nova] [instance: bbed3eed-f511-4b9e-9632-74841df01592] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 640.393524] env[62096]: DEBUG nova.network.neutron [-] [instance: bbed3eed-f511-4b9e-9632-74841df01592] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 640.394040] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 3487c32d61764499ad061a92138f0a6d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 640.406429] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3487c32d61764499ad061a92138f0a6d [ 640.422555] env[62096]: DEBUG nova.network.neutron [req-f2dfdeb6-4ca3-466c-9e5c-d3bc3c728fb0 req-61286e3a-8657-456c-8957-c522ba4abd46 service nova] [instance: bbed3eed-f511-4b9e-9632-74841df01592] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 640.423261] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-f2dfdeb6-4ca3-466c-9e5c-d3bc3c728fb0 req-61286e3a-8657-456c-8957-c522ba4abd46 service nova] Expecting reply to msg 2a2b35cac0c14c119b9fc30f3bbf7daa in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 640.431303] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2a2b35cac0c14c119b9fc30f3bbf7daa [ 640.730467] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Expecting reply to msg 048b81b4e2a4460da31b98eb699e600a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 640.731993] env[62096]: DEBUG nova.scheduler.client.report [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on 
inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 640.734406] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 63d3709de8414d4493d28ac2dd540c70 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 640.745590] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 63d3709de8414d4493d28ac2dd540c70 [ 640.759600] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 048b81b4e2a4460da31b98eb699e600a [ 640.900292] env[62096]: INFO nova.compute.manager [-] [instance: bbed3eed-f511-4b9e-9632-74841df01592] Took 1.03 seconds to deallocate network for instance. [ 640.902682] env[62096]: DEBUG nova.compute.claims [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: bbed3eed-f511-4b9e-9632-74841df01592] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 640.902907] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 640.925498] env[62096]: DEBUG oslo_concurrency.lockutils [req-f2dfdeb6-4ca3-466c-9e5c-d3bc3c728fb0 req-61286e3a-8657-456c-8957-c522ba4abd46 service nova] Releasing lock "refresh_cache-bbed3eed-f511-4b9e-9632-74841df01592" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 640.925831] env[62096]: DEBUG nova.compute.manager [req-f2dfdeb6-4ca3-466c-9e5c-d3bc3c728fb0 req-61286e3a-8657-456c-8957-c522ba4abd46 service nova] [instance: bbed3eed-f511-4b9e-9632-74841df01592] Received event network-vif-deleted-25b221b6-400a-463b-902b-13eb0822967e {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 641.237240] env[62096]: DEBUG nova.compute.manager [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] [instance: 06b9105a-8dcb-4bff-bba2-05e179036f24] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 641.239867] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.027s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 641.240404] env[62096]: ERROR nova.compute.manager [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 62ed3475-1d31-4edd-b7c2-33b14a3775c7, please check neutron logs for more information. [ 641.240404] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] Traceback (most recent call last): [ 641.240404] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 641.240404] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] self.driver.spawn(context, instance, image_meta, [ 641.240404] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 641.240404] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 641.240404] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 641.240404] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] vm_ref = self.build_virtual_machine(instance, [ 641.240404] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 641.240404] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] vif_infos = vmwarevif.get_vif_info(self._session, [ 641.240404] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 641.240905] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] for vif in network_info: [ 641.240905] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 641.240905] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] return self._sync_wrapper(fn, *args, **kwargs) [ 641.240905] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 641.240905] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] self.wait() [ 641.240905] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 641.240905] env[62096]: ERROR nova.compute.manager [instance: 
805c4c87-4e1a-4733-86a0-4c82daf615eb] self[:] = self._gt.wait() [ 641.240905] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 641.240905] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] return self._exit_event.wait() [ 641.240905] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 641.240905] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] result = hub.switch() [ 641.240905] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 641.240905] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] return self.greenlet.switch() [ 641.241435] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 641.241435] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] result = function(*args, **kwargs) [ 641.241435] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 641.241435] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] return func(*args, **kwargs) [ 641.241435] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 641.241435] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] raise e [ 641.241435] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 641.241435] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] nwinfo = self.network_api.allocate_for_instance( [ 641.241435] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 641.241435] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] created_port_ids = self._update_ports_for_instance( [ 641.241435] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 641.241435] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] with excutils.save_and_reraise_exception(): [ 641.241435] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 641.242168] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] self.force_reraise() [ 641.242168] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 
641.242168] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] raise self.value [ 641.242168] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 641.242168] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] updated_port = self._update_port( [ 641.242168] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 641.242168] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] _ensure_no_port_binding_failure(port) [ 641.242168] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 641.242168] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] raise exception.PortBindingFailed(port_id=port['id']) [ 641.242168] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] nova.exception.PortBindingFailed: Binding failed for port 62ed3475-1d31-4edd-b7c2-33b14a3775c7, please check neutron logs for more information. [ 641.242168] env[62096]: ERROR nova.compute.manager [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] [ 641.249161] env[62096]: DEBUG nova.compute.utils [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] Binding failed for port 62ed3475-1d31-4edd-b7c2-33b14a3775c7, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 641.249161] env[62096]: DEBUG oslo_concurrency.lockutils [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.310s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 641.249161] env[62096]: INFO nova.compute.claims [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 641.249161] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 9af40e6cd6a44edd821bb34d83514a77 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 641.249161] env[62096]: DEBUG nova.compute.manager [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] Build of instance 805c4c87-4e1a-4733-86a0-4c82daf615eb was re-scheduled: Binding failed for port 62ed3475-1d31-4edd-b7c2-33b14a3775c7, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 641.251594] env[62096]: DEBUG nova.compute.manager [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 641.251594] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Acquiring lock "refresh_cache-805c4c87-4e1a-4733-86a0-4c82daf615eb" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 641.251594] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Acquired lock "refresh_cache-805c4c87-4e1a-4733-86a0-4c82daf615eb" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.251594] env[62096]: DEBUG nova.network.neutron [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 641.251594] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg d03cf0e9d1c242ba93c9955abc071af7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 641.255690] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d03cf0e9d1c242ba93c9955abc071af7 [ 641.268123] env[62096]: DEBUG nova.virt.hardware [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 641.268538] env[62096]: DEBUG nova.virt.hardware [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 641.268806] env[62096]: DEBUG nova.virt.hardware [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Image 
limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 641.269094] env[62096]: DEBUG nova.virt.hardware [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 641.269335] env[62096]: DEBUG nova.virt.hardware [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 641.269583] env[62096]: DEBUG nova.virt.hardware [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 641.269900] env[62096]: DEBUG nova.virt.hardware [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 641.270160] env[62096]: DEBUG nova.virt.hardware [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 641.270449] env[62096]: DEBUG nova.virt.hardware [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 641.270734] env[62096]: DEBUG nova.virt.hardware [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 641.271035] env[62096]: DEBUG nova.virt.hardware [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 641.272403] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df77369d-0e6a-412c-a502-2193a938d40f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.281381] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8d0dc47-0a6f-4fd7-ad1d-149fa4ce54a3 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.286263] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9af40e6cd6a44edd821bb34d83514a77 [ 
641.296398] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] [instance: 06b9105a-8dcb-4bff-bba2-05e179036f24] Instance VIF info [] {{(pid=62096) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 641.301785] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Creating folder: Project (e69c7793c7b4407d8043f5bf60bb8b07). Parent ref: group-v107847. {{(pid=62096) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 641.302379] env[62096]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b2770ede-1f80-49ee-b716-c2c285077567 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.312042] env[62096]: INFO nova.virt.vmwareapi.vm_util [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Created folder: Project (e69c7793c7b4407d8043f5bf60bb8b07) in parent group-v107847. [ 641.312340] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Creating folder: Instances. Parent ref: group-v107859. {{(pid=62096) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 641.312659] env[62096]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-58ab0a39-fcbf-4667-8644-734267347251 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.321320] env[62096]: INFO nova.virt.vmwareapi.vm_util [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Created folder: Instances in parent group-v107859. [ 641.321670] env[62096]: DEBUG oslo.service.loopingcall [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 641.321952] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 06b9105a-8dcb-4bff-bba2-05e179036f24] Creating VM on the ESX host {{(pid=62096) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 641.322240] env[62096]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-127e5cb0-0366-4f26-ae02-b96819d104b1 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.338422] env[62096]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 641.338422] env[62096]: value = "task-397381" [ 641.338422] env[62096]: _type = "Task" [ 641.338422] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.345846] env[62096]: DEBUG oslo_vmware.api [-] Task: {'id': task-397381, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.751395] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 07ef75bfca1f41248243934344d6c926 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 641.759644] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 07ef75bfca1f41248243934344d6c926 [ 641.767349] env[62096]: DEBUG nova.network.neutron [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 641.843737] env[62096]: DEBUG nova.network.neutron [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 641.844364] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 5aeb8aca46ee46049f28631be6d5e977 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 641.852398] env[62096]: DEBUG oslo_vmware.api [-] Task: {'id': task-397381, 'name': CreateVM_Task, 'duration_secs': 0.247371} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.852886] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5aeb8aca46ee46049f28631be6d5e977 [ 641.853189] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 06b9105a-8dcb-4bff-bba2-05e179036f24] Created VM on the ESX host {{(pid=62096) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 641.853565] env[62096]: DEBUG oslo_concurrency.lockutils [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 641.853717] env[62096]: DEBUG oslo_concurrency.lockutils [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Acquired lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.854054] env[62096]: DEBUG oslo_concurrency.lockutils [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 641.854488] env[62096]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with 
opID=oslo.vmware-4295dd10-947a-40bb-88c4-f29529a9b537 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.858778] env[62096]: DEBUG oslo_vmware.api [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Waiting for the task: (returnval){ [ 641.858778] env[62096]: value = "session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]52bfd0f5-f27b-0cab-c089-90e143a6d4e0" [ 641.858778] env[62096]: _type = "Task" [ 641.858778] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.866025] env[62096]: DEBUG oslo_vmware.api [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]52bfd0f5-f27b-0cab-c089-90e143a6d4e0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.346562] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Releasing lock "refresh_cache-805c4c87-4e1a-4733-86a0-4c82daf615eb" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 642.346791] env[62096]: DEBUG nova.compute.manager [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 642.346972] env[62096]: DEBUG nova.compute.manager [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 642.347139] env[62096]: DEBUG nova.network.neutron [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 642.363499] env[62096]: DEBUG nova.network.neutron [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 642.364112] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg d19f81d34b234338b38e44638965c202 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 642.368576] env[62096]: DEBUG oslo_vmware.api [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]52bfd0f5-f27b-0cab-c089-90e143a6d4e0, 'name': SearchDatastore_Task, 'duration_secs': 0.008097} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.371078] env[62096]: DEBUG oslo_concurrency.lockutils [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Releasing lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 642.371291] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] [instance: 06b9105a-8dcb-4bff-bba2-05e179036f24] Processing image fb764baa-3805-45c4-a694-aa91b0932110 {{(pid=62096) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 642.371505] env[62096]: DEBUG oslo_concurrency.lockutils [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 642.371639] env[62096]: DEBUG oslo_concurrency.lockutils [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Acquired lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.371804] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62096) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 642.372232] env[62096]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b2dec353-c94a-4ec8-a627-d4aaee2b1171 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.374132] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d19f81d34b234338b38e44638965c202 [ 642.380187] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Created directory with path 
[datastore2] devstack-image-cache_base {{(pid=62096) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 642.380357] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62096) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 642.382953] env[62096]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca1e2cb9-67a8-46c1-874d-3773bf943805 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.389175] env[62096]: DEBUG oslo_vmware.api [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Waiting for the task: (returnval){ [ 642.389175] env[62096]: value = "session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]5254b428-e2ea-ebe9-5af3-caa313bcfc33" [ 642.389175] env[62096]: _type = "Task" [ 642.389175] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.396803] env[62096]: DEBUG oslo_vmware.api [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]5254b428-e2ea-ebe9-5af3-caa313bcfc33, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.724347] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bdf1b17-7ff3-46a8-8cb7-04af2cce3084 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.731889] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b90bc8ad-df3a-408b-b98d-b611d285d909 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.762250] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af645038-e5fc-42fe-8f93-9fb6b9a6301e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.769880] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-979c6ce4-213b-46ad-8ec0-ca7c01368a49 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.784708] env[62096]: DEBUG nova.compute.provider_tree [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 642.785250] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 27e4b1a894cf408e9516d18539563391 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 642.793705] env[62096]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 27e4b1a894cf408e9516d18539563391 [ 642.872083] env[62096]: DEBUG nova.network.neutron [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 642.872556] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 5725f6f4035e4a34b3213b9741513def in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 642.880711] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5725f6f4035e4a34b3213b9741513def [ 642.899486] env[62096]: DEBUG oslo_vmware.api [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]5254b428-e2ea-ebe9-5af3-caa313bcfc33, 'name': SearchDatastore_Task, 'duration_secs': 0.008099} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.900280] env[62096]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-95036db7-c10f-4720-9190-cc602784285b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.905626] env[62096]: DEBUG oslo_vmware.api [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Waiting for the task: (returnval){ [ 642.905626] env[62096]: value = "session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]521ea2b4-0319-3ddc-df1c-d91e02135560" [ 642.905626] env[62096]: _type = "Task" [ 642.905626] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.912941] env[62096]: DEBUG oslo_vmware.api [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]521ea2b4-0319-3ddc-df1c-d91e02135560, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.297464] env[62096]: DEBUG nova.scheduler.client.report [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 643.297464] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 915efaff13ac4ce297aed4a7fdee1f5a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 643.308974] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 915efaff13ac4ce297aed4a7fdee1f5a [ 643.374768] env[62096]: INFO nova.compute.manager [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 805c4c87-4e1a-4733-86a0-4c82daf615eb] Took 1.03 seconds to deallocate network for instance. [ 643.376746] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 23ed61388b7d42eba9a06e251e66066b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 643.409910] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 23ed61388b7d42eba9a06e251e66066b [ 643.418394] env[62096]: DEBUG oslo_vmware.api [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]521ea2b4-0319-3ddc-df1c-d91e02135560, 'name': SearchDatastore_Task, 'duration_secs': 0.008046} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.418824] env[62096]: DEBUG oslo_concurrency.lockutils [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Releasing lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 643.419174] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk to [datastore2] 06b9105a-8dcb-4bff-bba2-05e179036f24/06b9105a-8dcb-4bff-bba2-05e179036f24.vmdk {{(pid=62096) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 643.419578] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-48da113e-35a7-4c72-9903-16da66d8c18a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.426203] env[62096]: DEBUG oslo_vmware.api [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Waiting for the task: (returnval){ [ 643.426203] env[62096]: value = "task-397382" [ 643.426203] env[62096]: _type = "Task" [ 643.426203] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.434813] env[62096]: DEBUG oslo_vmware.api [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Task: {'id': task-397382, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.816611] env[62096]: DEBUG oslo_concurrency.lockutils [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.562s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 643.816611] env[62096]: DEBUG nova.compute.manager [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 643.816611] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg cdd60771db2b4eab9c85f1bf87d7814e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 643.816611] env[62096]: DEBUG oslo_concurrency.lockutils [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.738s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 643.820255] env[62096]: INFO nova.compute.claims [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 643.821175] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg cec85f1e45fb4b0e9b8006b811c46b6d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 643.845184] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cdd60771db2b4eab9c85f1bf87d7814e [ 643.863396] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cec85f1e45fb4b0e9b8006b811c46b6d [ 643.880990] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg bba965749d674770906b82b2e185acff in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 643.914758] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bba965749d674770906b82b2e185acff [ 643.935950] env[62096]: DEBUG oslo_vmware.api [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Task: {'id': task-397382, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.419901} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.936226] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk to [datastore2] 06b9105a-8dcb-4bff-bba2-05e179036f24/06b9105a-8dcb-4bff-bba2-05e179036f24.vmdk {{(pid=62096) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 643.936438] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] [instance: 06b9105a-8dcb-4bff-bba2-05e179036f24] Extending root virtual disk to 1048576 {{(pid=62096) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 643.936676] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6053d0b2-4b22-4c3a-a4ee-4d1d62a7a904 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.942787] env[62096]: DEBUG oslo_vmware.api [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Waiting for the task: (returnval){ [ 643.942787] env[62096]: value = "task-397383" [ 643.942787] env[62096]: _type = "Task" [ 643.942787] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.951534] env[62096]: DEBUG oslo_vmware.api [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Task: {'id': task-397383, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.325135] env[62096]: DEBUG nova.compute.utils [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 644.325847] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 2a936688b7714f1689d6e8380ffb757b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 644.328691] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 1ee520820e31444f96524eac8ae0b79e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 644.330250] env[62096]: DEBUG nova.compute.manager [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 644.330436] env[62096]: DEBUG nova.network.neutron [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 644.336176] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1ee520820e31444f96524eac8ae0b79e [ 644.340491] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2a936688b7714f1689d6e8380ffb757b [ 644.384888] env[62096]: DEBUG nova.policy [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2babb5a1feb74792bfdf84538811475d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4ab5813e2204414394f0ecb3160f4687', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 644.419623] env[62096]: INFO nova.scheduler.client.report [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Deleted allocations for instance 805c4c87-4e1a-4733-86a0-4c82daf615eb [ 644.425802] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg e022fa27adbb42a594806ba5a966841c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 644.440035] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e022fa27adbb42a594806ba5a966841c [ 644.456778] env[62096]: DEBUG oslo_vmware.api [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Task: {'id': task-397383, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.057359} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.457051] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] [instance: 06b9105a-8dcb-4bff-bba2-05e179036f24] Extended root virtual disk {{(pid=62096) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 644.457878] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7609ae01-0ba9-4059-9867-fe2f43e6125c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.481618] env[62096]: DEBUG nova.virt.vmwareapi.volumeops [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] [instance: 06b9105a-8dcb-4bff-bba2-05e179036f24] Reconfiguring VM instance instance-0000001b to attach disk [datastore2] 06b9105a-8dcb-4bff-bba2-05e179036f24/06b9105a-8dcb-4bff-bba2-05e179036f24.vmdk or device None with type sparse {{(pid=62096) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 644.482474] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c2b508a8-c3d0-44c8-bc87-ececd6bbc0ad {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.503521] env[62096]: DEBUG oslo_vmware.api [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Waiting for the task: (returnval){ [ 644.503521] env[62096]: value = "task-397384" [ 644.503521] env[62096]: _type = "Task" [ 644.503521] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.513169] env[62096]: DEBUG oslo_vmware.api [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Task: {'id': task-397384, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.735036] env[62096]: DEBUG nova.network.neutron [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] Successfully created port: 6c8587cf-1294-4940-88ce-d97511d27160 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 644.833770] env[62096]: DEBUG nova.compute.manager [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 644.836058] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg d74d33a02a8143ed8a47ecf1e92d5201 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 644.893746] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d74d33a02a8143ed8a47ecf1e92d5201 [ 644.928225] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a7dac61a-a9fc-4db2-bd21-9f05c56d2478 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Lock "805c4c87-4e1a-4733-86a0-4c82daf615eb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.252s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 644.929072] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Expecting reply to msg a3c1bf8b5f054345846120b1242b959d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 644.942682] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a3c1bf8b5f054345846120b1242b959d [ 645.017367] env[62096]: DEBUG oslo_vmware.api [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Task: {'id': task-397384, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.341173] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 538c6aa188a144aaba28b98262ee9597 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 645.344661] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01132ebb-dd04-48e5-8d13-50b748816a8c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.355514] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5cbf06e-a3ec-4d63-a69d-25979d342662 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.387295] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 538c6aa188a144aaba28b98262ee9597 [ 645.388438] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cee0e3e4-5cb6-4264-8773-0c5698deb958 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.396872] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec1b28a8-ece4-45a7-90f2-cda096013fc2 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.412844] env[62096]: DEBUG nova.compute.provider_tree [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Inventory 
has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 645.413340] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 396414dddab64a2f95f6715e4003dc18 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 645.425624] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 396414dddab64a2f95f6715e4003dc18 [ 645.436220] env[62096]: DEBUG nova.compute.manager [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 645.437968] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Expecting reply to msg e7ee3b4a31e049c8a0b9355b9018cba0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 645.480590] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e7ee3b4a31e049c8a0b9355b9018cba0 [ 645.516117] env[62096]: DEBUG oslo_vmware.api [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Task: {'id': task-397384, 'name': ReconfigVM_Task, 'duration_secs': 0.707641} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.516117] env[62096]: DEBUG nova.virt.vmwareapi.volumeops [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] [instance: 06b9105a-8dcb-4bff-bba2-05e179036f24] Reconfigured VM instance instance-0000001b to attach disk [datastore2] 06b9105a-8dcb-4bff-bba2-05e179036f24/06b9105a-8dcb-4bff-bba2-05e179036f24.vmdk or device None with type sparse {{(pid=62096) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 645.516117] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7ff2a947-b6aa-46ef-8aff-705965c5d930 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.521501] env[62096]: DEBUG oslo_vmware.api [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Waiting for the task: (returnval){ [ 645.521501] env[62096]: value = "task-397385" [ 645.521501] env[62096]: _type = "Task" [ 645.521501] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.530320] env[62096]: DEBUG oslo_vmware.api [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Task: {'id': task-397385, 'name': Rename_Task} progress is 5%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.627593] env[62096]: DEBUG nova.compute.manager [req-85438eaf-f2d2-4014-8a84-69700d95858c req-905aca00-b087-401a-adc8-81f89f2c8159 service nova] [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] Received event network-changed-6c8587cf-1294-4940-88ce-d97511d27160 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 645.627778] env[62096]: DEBUG nova.compute.manager [req-85438eaf-f2d2-4014-8a84-69700d95858c req-905aca00-b087-401a-adc8-81f89f2c8159 service nova] [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] Refreshing instance network info cache due to event network-changed-6c8587cf-1294-4940-88ce-d97511d27160. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 645.627990] env[62096]: DEBUG oslo_concurrency.lockutils [req-85438eaf-f2d2-4014-8a84-69700d95858c req-905aca00-b087-401a-adc8-81f89f2c8159 service nova] Acquiring lock "refresh_cache-c2a06721-6848-4fc4-89da-5d292853b6e9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 645.628507] env[62096]: DEBUG oslo_concurrency.lockutils [req-85438eaf-f2d2-4014-8a84-69700d95858c req-905aca00-b087-401a-adc8-81f89f2c8159 service nova] Acquired lock "refresh_cache-c2a06721-6848-4fc4-89da-5d292853b6e9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 645.628711] env[62096]: DEBUG nova.network.neutron [req-85438eaf-f2d2-4014-8a84-69700d95858c req-905aca00-b087-401a-adc8-81f89f2c8159 service nova] [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] Refreshing network info cache for port 6c8587cf-1294-4940-88ce-d97511d27160 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 645.629140] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-85438eaf-f2d2-4014-8a84-69700d95858c req-905aca00-b087-401a-adc8-81f89f2c8159 service nova] Expecting reply to msg 100a92e1a4974adc9da779b9e6f67248 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 645.637872] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 100a92e1a4974adc9da779b9e6f67248 [ 645.731995] env[62096]: ERROR nova.compute.manager [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 6c8587cf-1294-4940-88ce-d97511d27160, please check neutron logs for more information. 
[ 645.731995] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 645.731995] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 645.731995] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 645.731995] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 645.731995] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 645.731995] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 645.731995] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 645.731995] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 645.731995] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 645.731995] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 645.731995] env[62096]: ERROR nova.compute.manager raise self.value [ 645.731995] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 645.731995] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 645.731995] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 645.731995] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 645.732495] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 645.732495] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 645.732495] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 6c8587cf-1294-4940-88ce-d97511d27160, please check neutron logs for more information. 
[ 645.732495] env[62096]: ERROR nova.compute.manager [ 645.732495] env[62096]: Traceback (most recent call last): [ 645.732495] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 645.732495] env[62096]: listener.cb(fileno) [ 645.732495] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 645.732495] env[62096]: result = function(*args, **kwargs) [ 645.732495] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 645.732495] env[62096]: return func(*args, **kwargs) [ 645.732495] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 645.732495] env[62096]: raise e [ 645.732495] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 645.732495] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 645.732495] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 645.732495] env[62096]: created_port_ids = self._update_ports_for_instance( [ 645.732495] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 645.732495] env[62096]: with excutils.save_and_reraise_exception(): [ 645.732495] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 645.732495] env[62096]: self.force_reraise() [ 645.732495] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 645.732495] env[62096]: raise self.value [ 645.732495] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 645.732495] env[62096]: updated_port = self._update_port( [ 645.732495] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 645.732495] env[62096]: _ensure_no_port_binding_failure(port) [ 645.732495] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 645.732495] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 645.733229] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 6c8587cf-1294-4940-88ce-d97511d27160, please check neutron logs for more information. [ 645.733229] env[62096]: Removing descriptor: 14 [ 645.844472] env[62096]: DEBUG nova.compute.manager [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 645.868052] env[62096]: DEBUG nova.virt.hardware [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 645.868404] env[62096]: DEBUG nova.virt.hardware [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 645.868571] env[62096]: DEBUG nova.virt.hardware [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 645.868752] env[62096]: DEBUG nova.virt.hardware [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 645.868895] env[62096]: DEBUG nova.virt.hardware [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 645.869042] env[62096]: DEBUG nova.virt.hardware [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 645.869243] env[62096]: DEBUG nova.virt.hardware [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 645.869398] env[62096]: DEBUG nova.virt.hardware [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 645.869558] env[62096]: DEBUG nova.virt.hardware [None 
req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 645.869715] env[62096]: DEBUG nova.virt.hardware [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 645.869882] env[62096]: DEBUG nova.virt.hardware [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 645.870724] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-712d63b5-630a-46da-a2d7-2569cf0b3066 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.878911] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-220697ff-8fe3-4794-b723-f32ee8f88ee1 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.895566] env[62096]: ERROR nova.compute.manager [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 6c8587cf-1294-4940-88ce-d97511d27160, please check neutron logs for more information. 
[ 645.895566] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] Traceback (most recent call last): [ 645.895566] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 645.895566] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] yield resources [ 645.895566] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 645.895566] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] self.driver.spawn(context, instance, image_meta, [ 645.895566] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 645.895566] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 645.895566] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 645.895566] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] vm_ref = self.build_virtual_machine(instance, [ 645.895566] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 645.895935] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] vif_infos = vmwarevif.get_vif_info(self._session, [ 645.895935] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 645.895935] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] for vif in network_info: [ 645.895935] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 645.895935] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] return self._sync_wrapper(fn, *args, **kwargs) [ 645.895935] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 645.895935] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] self.wait() [ 645.895935] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 645.895935] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] self[:] = self._gt.wait() [ 645.895935] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 645.895935] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] return self._exit_event.wait() [ 645.895935] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 645.895935] env[62096]: ERROR 
nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] current.throw(*self._exc) [ 645.896416] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 645.896416] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] result = function(*args, **kwargs) [ 645.896416] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 645.896416] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] return func(*args, **kwargs) [ 645.896416] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 645.896416] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] raise e [ 645.896416] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 645.896416] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] nwinfo = self.network_api.allocate_for_instance( [ 645.896416] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 645.896416] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] created_port_ids = self._update_ports_for_instance( [ 645.896416] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 645.896416] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] with excutils.save_and_reraise_exception(): [ 645.896416] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 645.896783] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] self.force_reraise() [ 645.896783] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 645.896783] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] raise self.value [ 645.896783] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 645.896783] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] updated_port = self._update_port( [ 645.896783] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 645.896783] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] _ensure_no_port_binding_failure(port) [ 645.896783] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
645.896783] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] raise exception.PortBindingFailed(port_id=port['id']) [ 645.896783] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] nova.exception.PortBindingFailed: Binding failed for port 6c8587cf-1294-4940-88ce-d97511d27160, please check neutron logs for more information. [ 645.896783] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] [ 645.896783] env[62096]: INFO nova.compute.manager [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] Terminating instance [ 645.897989] env[62096]: DEBUG oslo_concurrency.lockutils [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Acquiring lock "refresh_cache-c2a06721-6848-4fc4-89da-5d292853b6e9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 645.916478] env[62096]: DEBUG nova.scheduler.client.report [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 645.918969] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg d8aa280bb670461aa0b411c585364453 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 645.930580] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d8aa280bb670461aa0b411c585364453 [ 645.961974] env[62096]: DEBUG oslo_concurrency.lockutils [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 646.034068] env[62096]: DEBUG oslo_vmware.api [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Task: {'id': task-397385, 'name': Rename_Task, 'duration_secs': 0.136727} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.034421] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] [instance: 06b9105a-8dcb-4bff-bba2-05e179036f24] Powering on the VM {{(pid=62096) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 646.035504] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a8817932-94b1-4cb9-904b-fc92fa23cd00 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.042180] env[62096]: DEBUG oslo_vmware.api [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Waiting for the task: (returnval){ [ 646.042180] env[62096]: value = "task-397386" [ 646.042180] env[62096]: _type = "Task" [ 646.042180] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.056039] env[62096]: DEBUG oslo_vmware.api [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Task: {'id': task-397386, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.146403] env[62096]: DEBUG nova.network.neutron [req-85438eaf-f2d2-4014-8a84-69700d95858c req-905aca00-b087-401a-adc8-81f89f2c8159 service nova] [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 646.228137] env[62096]: DEBUG nova.network.neutron [req-85438eaf-f2d2-4014-8a84-69700d95858c req-905aca00-b087-401a-adc8-81f89f2c8159 service nova] [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 646.228702] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-85438eaf-f2d2-4014-8a84-69700d95858c req-905aca00-b087-401a-adc8-81f89f2c8159 service nova] Expecting reply to msg 17c1b1174830456297cdd24208a25aa3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 646.237407] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 17c1b1174830456297cdd24208a25aa3 [ 646.422127] env[62096]: DEBUG oslo_concurrency.lockutils [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.613s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 646.422519] env[62096]: DEBUG nova.compute.manager [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 646.424415] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 847e4f1105234ac2b6e4985124d2bcf8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 646.425557] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.061s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 646.427145] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Expecting reply to msg 40e769e850ca4c48b2b8ad4b2fdbb671 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 646.459055] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 847e4f1105234ac2b6e4985124d2bcf8 [ 646.459996] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 40e769e850ca4c48b2b8ad4b2fdbb671 [ 646.552614] env[62096]: DEBUG oslo_vmware.api [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Task: {'id': task-397386, 'name': PowerOnVM_Task, 'duration_secs': 0.400985} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.552871] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] [instance: 06b9105a-8dcb-4bff-bba2-05e179036f24] Powered on the VM {{(pid=62096) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 646.553064] env[62096]: INFO nova.compute.manager [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] [instance: 06b9105a-8dcb-4bff-bba2-05e179036f24] Took 5.32 seconds to spawn the instance on the hypervisor. 
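The Rename_Task and PowerOnVM_Task entries around this point show the wait_for_task/_poll_task loop in oslo_vmware.api: a vCenter task is submitted, then its state is polled until it succeeds or errors, with progress logged on each pass. The snippet below is only an illustrative re-creation of that polling pattern under stated assumptions; fetch_task_info and the state strings are placeholders, not the oslo.vmware API.

    import time

    def wait_for_task(fetch_task_info, interval=0.5, timeout=60.0):
        # Poll a vCenter-style task until it reports success or error,
        # logging progress on each pass, as the _poll_task entries do above.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = fetch_task_info()
            if info["state"] == "success":
                return info
            if info["state"] == "error":
                raise RuntimeError(f"Task {info['id']} failed: {info.get('error')}")
            print(f"Task {info['id']} progress is {info.get('progress', 0)}%.")
            time.sleep(interval)
        raise TimeoutError(f"Task did not complete within {timeout}s")

    # Example: a fake task that completes on the second poll.
    states = iter([
        {"id": "task-397386", "state": "running", "progress": 0},
        {"id": "task-397386", "state": "success", "progress": 100},
    ])
    print(wait_for_task(lambda: next(states), interval=0.01))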
[ 646.553229] env[62096]: DEBUG nova.compute.manager [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] [instance: 06b9105a-8dcb-4bff-bba2-05e179036f24] Checking state {{(pid=62096) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 646.553994] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e09c9ab1-1b46-47f4-ac33-2d5e045cd33f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.561580] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Expecting reply to msg 9d29286221d349818eb4dabb1642b524 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 646.594075] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9d29286221d349818eb4dabb1642b524 [ 646.731183] env[62096]: DEBUG oslo_concurrency.lockutils [req-85438eaf-f2d2-4014-8a84-69700d95858c req-905aca00-b087-401a-adc8-81f89f2c8159 service nova] Releasing lock "refresh_cache-c2a06721-6848-4fc4-89da-5d292853b6e9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 646.731543] env[62096]: DEBUG oslo_concurrency.lockutils [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Acquired lock "refresh_cache-c2a06721-6848-4fc4-89da-5d292853b6e9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 646.731776] env[62096]: DEBUG nova.network.neutron [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 646.732230] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 5c6f0bbb543340bd80d9f51374ea99be in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 646.739790] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5c6f0bbb543340bd80d9f51374ea99be [ 646.785904] env[62096]: DEBUG oslo_concurrency.lockutils [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Acquiring lock "cffc0ebc-4fb1-47c9-8882-b8431046ef2f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 646.786089] env[62096]: DEBUG oslo_concurrency.lockutils [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Lock "cffc0ebc-4fb1-47c9-8882-b8431046ef2f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 646.929986] env[62096]: DEBUG 
nova.compute.utils [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 646.930652] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 0050b1df43c8491cbff89f3483491b3f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 646.934840] env[62096]: DEBUG nova.compute.manager [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 646.935038] env[62096]: DEBUG nova.network.neutron [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 646.940981] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0050b1df43c8491cbff89f3483491b3f [ 646.991351] env[62096]: DEBUG nova.policy [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2babb5a1feb74792bfdf84538811475d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4ab5813e2204414394f0ecb3160f4687', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 647.028262] env[62096]: DEBUG nova.compute.manager [None req-00c6e032-f476-4c99-b46b-a25d1d5087b3 tempest-ServerDiagnosticsV248Test-489447487 tempest-ServerDiagnosticsV248Test-489447487-project-admin] [instance: 06b9105a-8dcb-4bff-bba2-05e179036f24] Checking state {{(pid=62096) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 647.029409] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-582dcd2c-df25-40ae-a14b-e8bd3c772957 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.038172] env[62096]: INFO nova.compute.manager [None req-00c6e032-f476-4c99-b46b-a25d1d5087b3 tempest-ServerDiagnosticsV248Test-489447487 tempest-ServerDiagnosticsV248Test-489447487-project-admin] [instance: 06b9105a-8dcb-4bff-bba2-05e179036f24] Retrieving diagnostics [ 647.038901] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef95a8b7-4318-440e-ac8a-08337c35d9b7 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.078186] env[62096]: INFO nova.compute.manager [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 
tempest-ServerDiagnosticsV248Test-257505407-project-member] [instance: 06b9105a-8dcb-4bff-bba2-05e179036f24] Took 25.43 seconds to build instance. [ 647.080494] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Expecting reply to msg ddf89acd6aed4714a9a03305a35c0fc1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 647.094893] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ddf89acd6aed4714a9a03305a35c0fc1 [ 647.250650] env[62096]: DEBUG nova.network.neutron [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 647.281634] env[62096]: DEBUG nova.network.neutron [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] Successfully created port: 169d02ed-e804-4dc5-a136-c6d9178c5dcc {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 647.350622] env[62096]: DEBUG nova.network.neutron [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 647.351160] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg d015659da0b4439ca9e703a365e78926 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 647.359707] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d015659da0b4439ca9e703a365e78926 [ 647.379972] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0135c36-ca8a-4cac-8b25-1d0d025c3a4f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.387607] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe1bd0a4-2eba-4843-a58f-3deb4d30e98d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.090583] env[62096]: DEBUG nova.compute.manager [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 648.094747] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg c0a9f65ca24d4b8da8ca9182d39bfefb in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 648.104764] env[62096]: DEBUG oslo_concurrency.lockutils [None req-104fcf9b-d4a1-4685-8056-e7a4d7fd3107 tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Lock "06b9105a-8dcb-4bff-bba2-05e179036f24" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 72.823s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 648.105184] env[62096]: DEBUG oslo_concurrency.lockutils [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Releasing lock "refresh_cache-c2a06721-6848-4fc4-89da-5d292853b6e9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 648.105568] env[62096]: DEBUG nova.compute.manager [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 648.105755] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 648.108955] env[62096]: DEBUG nova.compute.manager [req-8ef1c5b7-b9d7-4724-a0e5-3fe5481d584a req-841aa622-b249-4d44-a8b2-78dfcc12d986 service nova] [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] Received event network-vif-deleted-6c8587cf-1294-4940-88ce-d97511d27160 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 648.109515] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Expecting reply to msg 8277236524794ed895cba469ea9c724d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 648.110401] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0966c4dc-b59d-403a-95f8-7b48b37cca0e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.112673] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb65a4dd-e787-4294-b691-78ebf845b82f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.118341] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8277236524794ed895cba469ea9c724d [ 648.122724] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc5ce51a-d719-4979-803c-7df8c2af4e9e {{(pid=62096) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.133306] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f559d96-da4b-4297-9f32-6fba49177af2 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.139050] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c0a9f65ca24d4b8da8ca9182d39bfefb [ 648.148837] env[62096]: DEBUG nova.compute.provider_tree [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 648.149301] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Expecting reply to msg 863aa8bb7a7d423baca916489f99a8d0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 648.154069] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c2a06721-6848-4fc4-89da-5d292853b6e9 could not be found. [ 648.154285] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 648.154464] env[62096]: INFO nova.compute.manager [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] Took 0.05 seconds to destroy the instance on the hypervisor. [ 648.154695] env[62096]: DEBUG oslo.service.loopingcall [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 648.155107] env[62096]: DEBUG nova.compute.manager [-] [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 648.155195] env[62096]: DEBUG nova.network.neutron [-] [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 648.156987] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 863aa8bb7a7d423baca916489f99a8d0 [ 648.177497] env[62096]: DEBUG nova.network.neutron [-] [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 648.178039] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg bc99875b3f8447caae14c1bab2acef01 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 648.184893] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bc99875b3f8447caae14c1bab2acef01 [ 648.280856] env[62096]: ERROR nova.compute.manager [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 169d02ed-e804-4dc5-a136-c6d9178c5dcc, please check neutron logs for more information. [ 648.280856] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 648.280856] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 648.280856] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 648.280856] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 648.280856] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 648.280856] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 648.280856] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 648.280856] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 648.280856] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 648.280856] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 648.280856] env[62096]: ERROR nova.compute.manager raise self.value [ 648.280856] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 648.280856] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 648.280856] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 648.280856] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 648.281428] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 648.281428] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 648.281428] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 169d02ed-e804-4dc5-a136-c6d9178c5dcc, please check neutron logs for more information. 
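Several frames in the traceback above pass through oslo_utils.excutils.save_and_reraise_exception(): cleanup runs while the PortBindingFailed is being handled, after which force_reraise() re-raises the saved exception (the `raise self.value` frame). The context manager below is a simplified stand-in written for illustration of that pattern, not the oslo_utils implementation.

    import sys

    class save_and_reraise_exception:
        # Simplified stand-in: capture the exception currently being handled,
        # run the with-block as cleanup, then re-raise the saved exception
        # (what force_reraise() / `raise self.value` do in the frames above),
        # unless a caller clears `reraise`.
        def __enter__(self):
            self.reraise = True
            self.value = sys.exc_info()[1]   # exception currently being handled
            return self

        def __exit__(self, exc_type, exc, tb):
            if exc is not None:
                return False                 # a new exception wins; propagate it
            if self.reraise and self.value is not None:
                raise self.value             # re-raise the saved original
            return False

    def _update_ports_for_instance(port_ids):
        for port_id in port_ids:
            try:
                raise RuntimeError(f"binding failed for port {port_id}")
            except RuntimeError:
                with save_and_reraise_exception():
                    print(f"rolling back port {port_id} before re-raising")

    try:
        _update_ports_for_instance(["169d02ed-e804-4dc5-a136-c6d9178c5dcc"])
    except RuntimeError as exc:
        print("caller still sees the original error:", exc)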
[ 648.281428] env[62096]: ERROR nova.compute.manager [ 648.281428] env[62096]: Traceback (most recent call last): [ 648.281428] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 648.281428] env[62096]: listener.cb(fileno) [ 648.281428] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 648.281428] env[62096]: result = function(*args, **kwargs) [ 648.281428] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 648.281428] env[62096]: return func(*args, **kwargs) [ 648.281428] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 648.281428] env[62096]: raise e [ 648.281428] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 648.281428] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 648.281428] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 648.281428] env[62096]: created_port_ids = self._update_ports_for_instance( [ 648.281428] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 648.281428] env[62096]: with excutils.save_and_reraise_exception(): [ 648.281428] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 648.281428] env[62096]: self.force_reraise() [ 648.281428] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 648.281428] env[62096]: raise self.value [ 648.281428] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 648.281428] env[62096]: updated_port = self._update_port( [ 648.281428] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 648.281428] env[62096]: _ensure_no_port_binding_failure(port) [ 648.281428] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 648.281428] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 648.282228] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 169d02ed-e804-4dc5-a136-c6d9178c5dcc, please check neutron logs for more information. [ 648.282228] env[62096]: Removing descriptor: 14 [ 648.609806] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 4d49c097f8144518842795dbe874ba91 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 648.616402] env[62096]: DEBUG nova.compute.manager [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] Starting instance... 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 648.618040] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Expecting reply to msg 653c02fe42574aa19617daf333097b98 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 648.639817] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4d49c097f8144518842795dbe874ba91 [ 648.648780] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 653c02fe42574aa19617daf333097b98 [ 648.656700] env[62096]: DEBUG nova.scheduler.client.report [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 648.659056] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Expecting reply to msg f09536e37ffe4f83b00da0a533d17e88 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 648.669092] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f09536e37ffe4f83b00da0a533d17e88 [ 648.679423] env[62096]: DEBUG nova.network.neutron [-] [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 648.679800] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 2d6ec0348a1442d6ac65f378136212c0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 648.687314] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2d6ec0348a1442d6ac65f378136212c0 [ 649.112974] env[62096]: DEBUG nova.compute.manager [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 649.138449] env[62096]: DEBUG nova.virt.hardware [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 649.138720] env[62096]: DEBUG nova.virt.hardware [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 649.138892] env[62096]: DEBUG nova.virt.hardware [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 649.139076] env[62096]: DEBUG nova.virt.hardware [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 649.139220] env[62096]: DEBUG nova.virt.hardware [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 649.139365] env[62096]: DEBUG nova.virt.hardware [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 649.139563] env[62096]: DEBUG nova.virt.hardware [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 649.139714] env[62096]: DEBUG nova.virt.hardware [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 649.139873] env[62096]: DEBUG nova.virt.hardware [None 
req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 649.140069] env[62096]: DEBUG nova.virt.hardware [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 649.140275] env[62096]: DEBUG nova.virt.hardware [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 649.141104] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54425b40-79b5-4b1f-9c7b-dfa88cae3d3d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.144383] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 649.149799] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f29c78-0856-43a9-9d04-cbf625fd623b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.163960] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.738s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 649.164578] env[62096]: ERROR nova.compute.manager [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 013f7baa-f84e-483f-8fe4-b01cd8c88b38, please check neutron logs for more information. 
[ 649.164578] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] Traceback (most recent call last): [ 649.164578] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 649.164578] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] self.driver.spawn(context, instance, image_meta, [ 649.164578] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 649.164578] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 649.164578] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 649.164578] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] vm_ref = self.build_virtual_machine(instance, [ 649.164578] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 649.164578] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] vif_infos = vmwarevif.get_vif_info(self._session, [ 649.164578] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 649.164943] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] for vif in network_info: [ 649.164943] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 649.164943] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] return self._sync_wrapper(fn, *args, **kwargs) [ 649.164943] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 649.164943] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] self.wait() [ 649.164943] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 649.164943] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] self[:] = self._gt.wait() [ 649.164943] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 649.164943] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] return self._exit_event.wait() [ 649.164943] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 649.164943] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] result = hub.switch() [ 649.164943] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
649.164943] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] return self.greenlet.switch() [ 649.165386] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 649.165386] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] result = function(*args, **kwargs) [ 649.165386] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 649.165386] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] return func(*args, **kwargs) [ 649.165386] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 649.165386] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] raise e [ 649.165386] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 649.165386] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] nwinfo = self.network_api.allocate_for_instance( [ 649.165386] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 649.165386] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] created_port_ids = self._update_ports_for_instance( [ 649.165386] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 649.165386] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] with excutils.save_and_reraise_exception(): [ 649.165386] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 649.165770] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] self.force_reraise() [ 649.165770] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 649.165770] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] raise self.value [ 649.165770] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 649.165770] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] updated_port = self._update_port( [ 649.165770] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 649.165770] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] _ensure_no_port_binding_failure(port) [ 649.165770] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 649.165770] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] raise exception.PortBindingFailed(port_id=port['id']) [ 649.165770] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] nova.exception.PortBindingFailed: Binding failed for port 013f7baa-f84e-483f-8fe4-b01cd8c88b38, please check neutron logs for more information. [ 649.165770] env[62096]: ERROR nova.compute.manager [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] [ 649.166098] env[62096]: DEBUG nova.compute.utils [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] Binding failed for port 013f7baa-f84e-483f-8fe4-b01cd8c88b38, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 649.166965] env[62096]: ERROR nova.compute.manager [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 169d02ed-e804-4dc5-a136-c6d9178c5dcc, please check neutron logs for more information. [ 649.166965] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] Traceback (most recent call last): [ 649.166965] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 649.166965] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] yield resources [ 649.166965] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 649.166965] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] self.driver.spawn(context, instance, image_meta, [ 649.166965] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 649.166965] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 649.166965] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 649.166965] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] vm_ref = self.build_virtual_machine(instance, [ 649.166965] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 649.167381] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] vif_infos = vmwarevif.get_vif_info(self._session, [ 649.167381] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 649.167381] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] for vif in network_info: [ 649.167381] env[62096]: ERROR nova.compute.manager 
[instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 649.167381] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] return self._sync_wrapper(fn, *args, **kwargs) [ 649.167381] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 649.167381] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] self.wait() [ 649.167381] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 649.167381] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] self[:] = self._gt.wait() [ 649.167381] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 649.167381] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] return self._exit_event.wait() [ 649.167381] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 649.167381] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] current.throw(*self._exc) [ 649.167732] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 649.167732] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] result = function(*args, **kwargs) [ 649.167732] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 649.167732] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] return func(*args, **kwargs) [ 649.167732] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 649.167732] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] raise e [ 649.167732] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 649.167732] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] nwinfo = self.network_api.allocate_for_instance( [ 649.167732] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 649.167732] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] created_port_ids = self._update_ports_for_instance( [ 649.167732] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 649.167732] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] with excutils.save_and_reraise_exception(): [ 649.167732] env[62096]: ERROR nova.compute.manager [instance: 
f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 649.168130] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] self.force_reraise() [ 649.168130] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 649.168130] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] raise self.value [ 649.168130] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 649.168130] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] updated_port = self._update_port( [ 649.168130] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 649.168130] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] _ensure_no_port_binding_failure(port) [ 649.168130] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 649.168130] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] raise exception.PortBindingFailed(port_id=port['id']) [ 649.168130] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] nova.exception.PortBindingFailed: Binding failed for port 169d02ed-e804-4dc5-a136-c6d9178c5dcc, please check neutron logs for more information. [ 649.168130] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] [ 649.168130] env[62096]: INFO nova.compute.manager [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] Terminating instance [ 649.168790] env[62096]: DEBUG nova.compute.manager [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] Build of instance e48a5bde-e299-4567-b952-3c5f096fb65d was re-scheduled: Binding failed for port 013f7baa-f84e-483f-8fe4-b01cd8c88b38, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 649.169212] env[62096]: DEBUG nova.compute.manager [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 649.169408] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Acquiring lock "refresh_cache-e48a5bde-e299-4567-b952-3c5f096fb65d" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 649.169550] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Acquired lock "refresh_cache-e48a5bde-e299-4567-b952-3c5f096fb65d" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 649.169703] env[62096]: DEBUG nova.network.neutron [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 649.170098] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Expecting reply to msg eb1b4b6dcd7743a681e3d62877d3a207 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 649.170818] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.521s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 649.172614] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Expecting reply to msg ce0f04d33bc34582992628b1aab0a9a0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 649.173957] env[62096]: DEBUG oslo_concurrency.lockutils [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Acquiring lock "refresh_cache-f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 649.174069] env[62096]: DEBUG oslo_concurrency.lockutils [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Acquired lock "refresh_cache-f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 649.174196] env[62096]: DEBUG nova.network.neutron [None 
req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 649.174568] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg be9a0b4240074eb184b6cc72752f5e3f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 649.176604] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eb1b4b6dcd7743a681e3d62877d3a207 [ 649.179867] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg be9a0b4240074eb184b6cc72752f5e3f [ 649.181281] env[62096]: INFO nova.compute.manager [-] [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] Took 1.03 seconds to deallocate network for instance. [ 649.183223] env[62096]: DEBUG nova.compute.claims [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 649.183395] env[62096]: DEBUG oslo_concurrency.lockutils [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 649.208914] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ce0f04d33bc34582992628b1aab0a9a0 [ 649.713613] env[62096]: DEBUG nova.network.neutron [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 649.740994] env[62096]: DEBUG nova.network.neutron [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 649.782937] env[62096]: DEBUG nova.network.neutron [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 649.783721] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Expecting reply to msg 997bdab121fd451b8ae7521fa3a1a6eb in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 649.794719] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 997bdab121fd451b8ae7521fa3a1a6eb [ 649.857660] env[62096]: DEBUG nova.compute.manager [req-be263e4c-7f2e-4cbd-9e20-7ead5522e130 req-b0e577a3-4d8b-4beb-9757-62ce3d1ad284 service nova] [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] Received event network-changed-169d02ed-e804-4dc5-a136-c6d9178c5dcc {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 649.857858] env[62096]: DEBUG nova.compute.manager [req-be263e4c-7f2e-4cbd-9e20-7ead5522e130 req-b0e577a3-4d8b-4beb-9757-62ce3d1ad284 service nova] [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] Refreshing instance network info cache due to event network-changed-169d02ed-e804-4dc5-a136-c6d9178c5dcc. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 649.858056] env[62096]: DEBUG oslo_concurrency.lockutils [req-be263e4c-7f2e-4cbd-9e20-7ead5522e130 req-b0e577a3-4d8b-4beb-9757-62ce3d1ad284 service nova] Acquiring lock "refresh_cache-f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 649.860161] env[62096]: DEBUG nova.network.neutron [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 649.860602] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 3efffe25eab142dfb1cd17b6cb31a936 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 649.868981] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3efffe25eab142dfb1cd17b6cb31a936 [ 650.192991] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-568b83d3-55d8-4510-902a-27e9b6aad5b0 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.197714] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56ac92d8-9195-44e8-9029-1be41b4a051d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.227353] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-9cfab020-ad80-4a8c-86af-745ad04ade69 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.234190] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fba3e2ea-c1bb-4bd0-a9bd-f88157afa5dc {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.246873] env[62096]: DEBUG nova.compute.provider_tree [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 650.247356] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Expecting reply to msg 61a6db3fcc8a451a82f99bb59262d8fc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 650.254709] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 61a6db3fcc8a451a82f99bb59262d8fc [ 650.285876] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Releasing lock "refresh_cache-e48a5bde-e299-4567-b952-3c5f096fb65d" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 650.286107] env[62096]: DEBUG nova.compute.manager [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 650.286309] env[62096]: DEBUG nova.compute.manager [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 650.286479] env[62096]: DEBUG nova.network.neutron [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 650.299265] env[62096]: DEBUG nova.network.neutron [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 650.299818] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Expecting reply to msg 1f3d5849e6b843eeb32959d60fadfe34 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 650.306748] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1f3d5849e6b843eeb32959d60fadfe34 [ 650.368463] env[62096]: DEBUG oslo_concurrency.lockutils [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Releasing lock "refresh_cache-f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 650.368890] env[62096]: DEBUG nova.compute.manager [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 650.369086] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 650.369391] env[62096]: DEBUG oslo_concurrency.lockutils [req-be263e4c-7f2e-4cbd-9e20-7ead5522e130 req-b0e577a3-4d8b-4beb-9757-62ce3d1ad284 service nova] Acquired lock "refresh_cache-f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.369563] env[62096]: DEBUG nova.network.neutron [req-be263e4c-7f2e-4cbd-9e20-7ead5522e130 req-b0e577a3-4d8b-4beb-9757-62ce3d1ad284 service nova] [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] Refreshing network info cache for port 169d02ed-e804-4dc5-a136-c6d9178c5dcc {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 650.369987] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-be263e4c-7f2e-4cbd-9e20-7ead5522e130 req-b0e577a3-4d8b-4beb-9757-62ce3d1ad284 service nova] Expecting reply to msg a64337701683477490967f626f53bbe3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 650.370770] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9e1bc659-c973-428e-808d-468b098152aa {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.379052] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a64337701683477490967f626f53bbe3 [ 650.386246] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1be6a7a-6b61-4846-a1ee-e29033c09d47 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.408355] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] Instance does not exist on 
backend: nova.exception.InstanceNotFound: Instance f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1 could not be found. [ 650.408563] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 650.408739] env[62096]: INFO nova.compute.manager [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] Took 0.04 seconds to destroy the instance on the hypervisor. [ 650.408982] env[62096]: DEBUG oslo.service.loopingcall [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 650.409206] env[62096]: DEBUG nova.compute.manager [-] [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 650.409298] env[62096]: DEBUG nova.network.neutron [-] [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 650.423082] env[62096]: DEBUG nova.network.neutron [-] [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 650.423390] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg cf3bb1ba9249414890f0158af0ee1319 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 650.429562] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cf3bb1ba9249414890f0158af0ee1319 [ 650.755414] env[62096]: DEBUG nova.scheduler.client.report [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 650.757781] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Expecting reply to msg 3535113c2f6b4affbf4723d215f775ee in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 650.768835] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3535113c2f6b4affbf4723d215f775ee [ 650.801965] env[62096]: DEBUG nova.network.neutron [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 
tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 650.802529] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Expecting reply to msg c798f6d192df4a81a85db69629701e1e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 650.810548] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c798f6d192df4a81a85db69629701e1e [ 650.900337] env[62096]: DEBUG nova.network.neutron [req-be263e4c-7f2e-4cbd-9e20-7ead5522e130 req-b0e577a3-4d8b-4beb-9757-62ce3d1ad284 service nova] [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 650.925571] env[62096]: DEBUG nova.network.neutron [-] [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 650.926022] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg ff16d829cbb84d5ca6ba5870d7d49522 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 650.934128] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ff16d829cbb84d5ca6ba5870d7d49522 [ 651.007789] env[62096]: DEBUG nova.network.neutron [req-be263e4c-7f2e-4cbd-9e20-7ead5522e130 req-b0e577a3-4d8b-4beb-9757-62ce3d1ad284 service nova] [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 651.008319] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-be263e4c-7f2e-4cbd-9e20-7ead5522e130 req-b0e577a3-4d8b-4beb-9757-62ce3d1ad284 service nova] Expecting reply to msg 1ee8a1fac96d4eccbb15f1bb0b9a959c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 651.016094] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1ee8a1fac96d4eccbb15f1bb0b9a959c [ 651.267216] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.096s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 651.267847] env[62096]: ERROR nova.compute.manager [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ec4bdedd-76f7-4dad-92db-f301287338c1, please check neutron logs for more information. 
[ 651.267847] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] Traceback (most recent call last): [ 651.267847] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 651.267847] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] self.driver.spawn(context, instance, image_meta, [ 651.267847] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 651.267847] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 651.267847] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 651.267847] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] vm_ref = self.build_virtual_machine(instance, [ 651.267847] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 651.267847] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] vif_infos = vmwarevif.get_vif_info(self._session, [ 651.267847] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 651.268253] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] for vif in network_info: [ 651.268253] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 651.268253] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] return self._sync_wrapper(fn, *args, **kwargs) [ 651.268253] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 651.268253] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] self.wait() [ 651.268253] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 651.268253] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] self[:] = self._gt.wait() [ 651.268253] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 651.268253] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] return self._exit_event.wait() [ 651.268253] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 651.268253] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] current.throw(*self._exc) [ 651.268253] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
651.268253] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] result = function(*args, **kwargs) [ 651.268636] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 651.268636] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] return func(*args, **kwargs) [ 651.268636] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 651.268636] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] raise e [ 651.268636] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 651.268636] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] nwinfo = self.network_api.allocate_for_instance( [ 651.268636] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 651.268636] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] created_port_ids = self._update_ports_for_instance( [ 651.268636] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 651.268636] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] with excutils.save_and_reraise_exception(): [ 651.268636] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 651.268636] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] self.force_reraise() [ 651.268636] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 651.269017] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] raise self.value [ 651.269017] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 651.269017] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] updated_port = self._update_port( [ 651.269017] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 651.269017] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] _ensure_no_port_binding_failure(port) [ 651.269017] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 651.269017] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] raise exception.PortBindingFailed(port_id=port['id']) [ 651.269017] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] nova.exception.PortBindingFailed: Binding failed for 
port ec4bdedd-76f7-4dad-92db-f301287338c1, please check neutron logs for more information. [ 651.269017] env[62096]: ERROR nova.compute.manager [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] [ 651.269017] env[62096]: DEBUG nova.compute.utils [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] Binding failed for port ec4bdedd-76f7-4dad-92db-f301287338c1, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 651.270083] env[62096]: DEBUG oslo_concurrency.lockutils [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.012s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 651.270302] env[62096]: DEBUG nova.objects.instance [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Lazy-loading 'resources' on Instance uuid 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61 {{(pid=62096) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 651.270631] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Expecting reply to msg 318999da03f84a78a21b04e97bb58f9f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 651.278729] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 318999da03f84a78a21b04e97bb58f9f [ 651.282979] env[62096]: DEBUG nova.compute.manager [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] Build of instance 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb was re-scheduled: Binding failed for port ec4bdedd-76f7-4dad-92db-f301287338c1, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 651.283419] env[62096]: DEBUG nova.compute.manager [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 651.283691] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Acquiring lock "refresh_cache-69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 651.283783] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Acquired lock "refresh_cache-69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 651.283974] env[62096]: DEBUG nova.network.neutron [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 651.284385] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Expecting reply to msg e1b689be664e42f8bc8b5e2153d787bf in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 651.292484] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e1b689be664e42f8bc8b5e2153d787bf [ 651.304924] env[62096]: INFO nova.compute.manager [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] [instance: e48a5bde-e299-4567-b952-3c5f096fb65d] Took 1.02 seconds to deallocate network for instance. [ 651.306473] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Expecting reply to msg f0f58054ac884071b972bc821b208eb2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 651.347717] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f0f58054ac884071b972bc821b208eb2 [ 651.434243] env[62096]: INFO nova.compute.manager [-] [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] Took 1.02 seconds to deallocate network for instance. 
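Annotation on the failures above: all three PortBindingFailed tracebacks in this excerpt (ports 013f7baa-f84e-483f-8fe4-b01cd8c88b38, 169d02ed-e804-4dc5-a136-c6d9178c5dcc and ec4bdedd-76f7-4dad-92db-f301287338c1) bottom out in _ensure_no_port_binding_failure in nova/network/neutron.py: when Neutron answers a port update with binding:vif_type set to binding_failed, Nova raises instead of handing an unbound VIF to the hypervisor driver, the resource claim is aborted and the build is re-scheduled, which matches the "Aborting claim" and "was re-scheduled" entries around it. A minimal, self-contained sketch of that guard follows; the exception class and helper are simplified stand-ins for illustration, not Nova's actual nova.exception module or exact source.

# Simplified sketch of the guard the tracebacks above end in.
# Assumptions: a Neutron-style port dict and a stand-in exception class.

VIF_TYPE_BINDING_FAILED = 'binding_failed'  # value Neutron reports for a failed binding


class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__('Binding failed for port %s, please check neutron '
                         'logs for more information.' % port_id)


def ensure_no_port_binding_failure(port):
    # If Neutron could not bind the port to the host, fail the build now
    # rather than spawning a VM with a dead VIF.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])


# One of the ports from the log above, as Neutron would return it:
try:
    ensure_no_port_binding_failure({'id': '013f7baa-f84e-483f-8fe4-b01cd8c88b38',
                                    'binding:vif_type': 'binding_failed'})
except PortBindingFailed as exc:
    print(exc)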
[ 651.436525] env[62096]: DEBUG nova.compute.claims [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 651.436705] env[62096]: DEBUG oslo_concurrency.lockutils [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 651.510488] env[62096]: DEBUG oslo_concurrency.lockutils [req-be263e4c-7f2e-4cbd-9e20-7ead5522e130 req-b0e577a3-4d8b-4beb-9757-62ce3d1ad284 service nova] Releasing lock "refresh_cache-f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 651.510722] env[62096]: DEBUG nova.compute.manager [req-be263e4c-7f2e-4cbd-9e20-7ead5522e130 req-b0e577a3-4d8b-4beb-9757-62ce3d1ad284 service nova] [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] Received event network-vif-deleted-169d02ed-e804-4dc5-a136-c6d9178c5dcc {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 651.664833] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b02a7d44-5347-436d-b6dd-80a41f152913 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.672383] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c17a646-6d2c-43f9-9121-3a6ec7276a4d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.707977] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22878440-fa9f-42a4-9d2a-4150955069f3 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.714507] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcd4ba15-5faf-4651-9fe7-ac451b61b581 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.726750] env[62096]: DEBUG nova.compute.provider_tree [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 651.727233] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Expecting reply to msg 05f693c3735d42f2bd97863c0012cdd4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 651.734318] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 05f693c3735d42f2bd97863c0012cdd4 [ 651.802682] env[62096]: DEBUG nova.network.neutron [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] 
[instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 651.819983] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Expecting reply to msg c6a89773bb5d4e60b453a95ee3883c54 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 651.852663] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c6a89773bb5d4e60b453a95ee3883c54 [ 651.903640] env[62096]: DEBUG nova.network.neutron [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 651.904402] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Expecting reply to msg 81da7eff319641cb9c1204929290cbd5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 651.915866] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 81da7eff319641cb9c1204929290cbd5 [ 652.230287] env[62096]: DEBUG nova.scheduler.client.report [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 652.232682] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Expecting reply to msg 41571f355e7e488ab5b667ca5c847db7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 652.243930] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 41571f355e7e488ab5b667ca5c847db7 [ 652.351599] env[62096]: INFO nova.scheduler.client.report [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Deleted allocations for instance e48a5bde-e299-4567-b952-3c5f096fb65d [ 652.357659] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Expecting reply to msg b11aad26191d49f59d7d9fe9316b43dd in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 652.370412] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b11aad26191d49f59d7d9fe9316b43dd [ 652.407130] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba 
tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Releasing lock "refresh_cache-69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 652.407278] env[62096]: DEBUG nova.compute.manager [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 652.407461] env[62096]: DEBUG nova.compute.manager [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 652.407623] env[62096]: DEBUG nova.network.neutron [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 652.426907] env[62096]: DEBUG nova.network.neutron [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 652.427471] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Expecting reply to msg 202907671d0442829058e66f818be2f1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 652.433387] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 202907671d0442829058e66f818be2f1 [ 652.735381] env[62096]: DEBUG oslo_concurrency.lockutils [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.465s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 652.738487] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.552s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 652.739356] env[62096]: INFO nova.compute.claims [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] [instance: 62b778aa-71c7-480b-8148-017773246caf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 652.740947] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 
tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Expecting reply to msg e08d51b8f8f140829af70759f80f878d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 652.752490] env[62096]: INFO nova.scheduler.client.report [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Deleted allocations for instance 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61 [ 652.755194] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Expecting reply to msg 48a8240e88c74ea9b15ac27853d6db49 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 652.774958] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e08d51b8f8f140829af70759f80f878d [ 652.797399] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 48a8240e88c74ea9b15ac27853d6db49 [ 652.860296] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b15e5306-4a15-4327-a851-ae7862f80688 tempest-DeleteServersAdminTestJSON-1001706998 tempest-DeleteServersAdminTestJSON-1001706998-project-member] Lock "e48a5bde-e299-4567-b952-3c5f096fb65d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 85.274s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 652.860853] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg 816edf93d80e4343964c4769b5ae93c9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 652.870082] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 816edf93d80e4343964c4769b5ae93c9 [ 652.929322] env[62096]: DEBUG nova.network.neutron [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 652.929857] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Expecting reply to msg 47a76d4eb8e24cd7a24f293eb0d80a50 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 652.937965] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 47a76d4eb8e24cd7a24f293eb0d80a50 [ 653.245093] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Expecting reply to msg 60935ca1216047de9842d3e7dce1d417 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 653.252759] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 60935ca1216047de9842d3e7dce1d417 [ 653.260639] env[62096]: DEBUG oslo_concurrency.lockutils [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Lock "12a4b44d-b6aa-45e2-bba9-d73f41fa4b61" 
"released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.757s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 653.260980] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-6f2dbda3-b665-4f79-8956-f4f8aac23815 tempest-ServersAaction247Test-1444232804 tempest-ServersAaction247Test-1444232804-project-member] Expecting reply to msg 69992317b9e441029dc614ec9e8bcc9c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 653.271359] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 69992317b9e441029dc614ec9e8bcc9c [ 653.362754] env[62096]: DEBUG nova.compute.manager [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 374ca884-8e77-4568-8667-e124e6df4c75] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 653.364694] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg 86b7aa86c15a437b9eadcea5b7cef874 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 653.404743] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 86b7aa86c15a437b9eadcea5b7cef874 [ 653.431890] env[62096]: INFO nova.compute.manager [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] [instance: 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb] Took 1.02 seconds to deallocate network for instance. 
[ 653.434094] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Expecting reply to msg a353950b0d7842a4a3585cf173d3c116 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 653.466571] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a353950b0d7842a4a3585cf173d3c116 [ 653.889838] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 653.940060] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Expecting reply to msg 6f190a43920349dda9a4d4c4fc25928e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 653.983469] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6f190a43920349dda9a4d4c4fc25928e [ 654.186793] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c98cc1dd-35ea-4bb6-92db-605407ec7889 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.194617] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1c317f6-4ac8-4747-9eaa-8e91497546d7 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.228963] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f900189-e41d-4bb1-bf61-97be51194cd0 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.236539] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eff06154-c3f8-4022-bb6d-634d58d5aa17 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.250616] env[62096]: DEBUG nova.compute.provider_tree [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 654.251121] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Expecting reply to msg 7d22272ffe934c30ba91ffecb106dc1b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 654.260238] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7d22272ffe934c30ba91ffecb106dc1b [ 654.466393] env[62096]: INFO nova.scheduler.client.report [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Deleted allocations for instance 69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb [ 654.474362] env[62096]: INFO 
oslo_messaging._drivers.amqpdriver [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Expecting reply to msg 972f61f8ce4f49d2b29e7c039d470075 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 654.518381] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 972f61f8ce4f49d2b29e7c039d470075 [ 654.753960] env[62096]: DEBUG nova.scheduler.client.report [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 654.756416] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Expecting reply to msg 8f6863a6f1fb42a8ac1f407a66f33bef in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 654.770660] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8f6863a6f1fb42a8ac1f407a66f33bef [ 654.978021] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b165e3d0-efb4-4eaf-a7d6-1c4c9099d3ba tempest-ImagesOneServerTestJSON-1002718403 tempest-ImagesOneServerTestJSON-1002718403-project-member] Lock "69dc4e18-d7fd-4a2a-bf1a-fbb68211bcdb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 85.478s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 654.978837] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg 988e37ebcac2490fb9cb0dfe53e108bf in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 655.000494] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 988e37ebcac2490fb9cb0dfe53e108bf [ 655.260517] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.523s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 655.261046] env[62096]: DEBUG nova.compute.manager [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] [instance: 62b778aa-71c7-480b-8148-017773246caf] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 655.262912] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Expecting reply to msg 7f0777b73ed24b98aef1d4220fe4d2a9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 655.264571] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 18.658s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 655.264776] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 655.265106] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62096) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 655.265282] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 18.434s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 655.268237] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg d623992ae022441f9c47d5d98454e9de in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 655.270115] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2118e700-4b38-493c-b964-23a6245db1cc {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.278998] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45c65b6e-db79-4a3f-a02d-3d99643581a1 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.293594] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-939e66ba-8f12-4a30-b0e9-0a052a4357d9 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.300240] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-508ece15-efab-409a-b6c2-10838b5bea1e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.307684] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d623992ae022441f9c47d5d98454e9de [ 655.334003] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7f0777b73ed24b98aef1d4220fe4d2a9 [ 
655.334580] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181746MB free_disk=126GB free_vcpus=48 pci_devices=None {{(pid=62096) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 655.334749] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 655.481409] env[62096]: DEBUG nova.compute.manager [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 655.483290] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg b892dc8ca5a246d49a84b4afd8495ff1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 655.519255] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b892dc8ca5a246d49a84b4afd8495ff1 [ 655.765992] env[62096]: DEBUG nova.compute.utils [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 655.766609] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Expecting reply to msg 2f960fe28f7a43a0aeb8541b692f8006 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 655.767548] env[62096]: DEBUG nova.compute.manager [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] [instance: 62b778aa-71c7-480b-8148-017773246caf] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 655.768224] env[62096]: DEBUG nova.network.neutron [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] [instance: 62b778aa-71c7-480b-8148-017773246caf] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 655.780698] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2f960fe28f7a43a0aeb8541b692f8006 [ 655.896175] env[62096]: DEBUG nova.policy [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ece5926f1f154c2a8849dd8f3916f39c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aa59f48efaee44f898561a5b3e7db1d7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 656.002511] env[62096]: DEBUG oslo_concurrency.lockutils [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 656.225819] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecd8f7a5-52f4-4cf7-af40-77251c62dbc7 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.235334] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c94448e-e0e3-4514-a909-8423ffda3b75 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.265758] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d3fb32c-a77d-4652-8d94-6e796c6ac712 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.270317] env[62096]: DEBUG nova.compute.manager [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] [instance: 62b778aa-71c7-480b-8148-017773246caf] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 656.271969] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Expecting reply to msg 6f197ffd916b46ceb3dca9736067f86c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 656.278802] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47742e63-5aef-4c40-bd74-1a0d478f8cb3 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.295538] env[62096]: DEBUG nova.compute.provider_tree [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 656.296151] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg b1529b0c9e5442918cad2135c37109ab in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 656.305914] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b1529b0c9e5442918cad2135c37109ab [ 656.315153] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6f197ffd916b46ceb3dca9736067f86c [ 656.712204] env[62096]: DEBUG nova.network.neutron [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] [instance: 62b778aa-71c7-480b-8148-017773246caf] Successfully created port: a72a7432-c15c-4535-bfe3-9b96b41ec2ce {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 656.779467] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Expecting reply to msg 5da37d6017c741d1b4314c589129dbc4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 656.806204] env[62096]: DEBUG nova.scheduler.client.report [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 656.809352] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg 522ab23e920448fab5aa7b702a9b19b0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 656.823110] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
522ab23e920448fab5aa7b702a9b19b0 [ 656.826813] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5da37d6017c741d1b4314c589129dbc4 [ 657.287542] env[62096]: DEBUG nova.compute.manager [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] [instance: 62b778aa-71c7-480b-8148-017773246caf] Start spawning the instance on the hypervisor. {{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 657.310851] env[62096]: DEBUG nova.virt.hardware [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 657.311105] env[62096]: DEBUG nova.virt.hardware [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 657.311264] env[62096]: DEBUG nova.virt.hardware [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 657.311472] env[62096]: DEBUG nova.virt.hardware [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 657.311628] env[62096]: DEBUG nova.virt.hardware [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 657.311776] env[62096]: DEBUG nova.virt.hardware [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 657.311977] env[62096]: DEBUG nova.virt.hardware [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 657.312210] env[62096]: DEBUG nova.virt.hardware [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 657.312315] env[62096]: DEBUG nova.virt.hardware [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 657.312472] env[62096]: DEBUG nova.virt.hardware [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 657.312637] env[62096]: DEBUG nova.virt.hardware [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 657.313371] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.048s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 657.313970] env[62096]: ERROR nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: 27d12301-a049-4d1e-b171-a09a642703fb] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port bdb9b731-7251-4c30-a6e1-466fc3302e95, please check neutron logs for more information. 
[ 657.313970] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] Traceback (most recent call last): [ 657.313970] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 657.313970] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] self.driver.spawn(context, instance, image_meta, [ 657.313970] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 657.313970] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 657.313970] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 657.313970] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] vm_ref = self.build_virtual_machine(instance, [ 657.313970] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 657.313970] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] vif_infos = vmwarevif.get_vif_info(self._session, [ 657.313970] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 657.314348] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] for vif in network_info: [ 657.314348] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 657.314348] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] return self._sync_wrapper(fn, *args, **kwargs) [ 657.314348] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 657.314348] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] self.wait() [ 657.314348] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 657.314348] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] self[:] = self._gt.wait() [ 657.314348] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 657.314348] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] return self._exit_event.wait() [ 657.314348] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 657.314348] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] current.throw(*self._exc) [ 657.314348] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
657.314348] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] result = function(*args, **kwargs) [ 657.314790] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 657.314790] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] return func(*args, **kwargs) [ 657.314790] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 657.314790] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] raise e [ 657.314790] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 657.314790] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] nwinfo = self.network_api.allocate_for_instance( [ 657.314790] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 657.314790] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] created_port_ids = self._update_ports_for_instance( [ 657.314790] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 657.314790] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] with excutils.save_and_reraise_exception(): [ 657.314790] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 657.314790] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] self.force_reraise() [ 657.314790] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 657.315612] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] raise self.value [ 657.315612] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 657.315612] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] updated_port = self._update_port( [ 657.315612] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 657.315612] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] _ensure_no_port_binding_failure(port) [ 657.315612] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 657.315612] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] raise exception.PortBindingFailed(port_id=port['id']) [ 657.315612] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] nova.exception.PortBindingFailed: Binding failed for 
port bdb9b731-7251-4c30-a6e1-466fc3302e95, please check neutron logs for more information. [ 657.315612] env[62096]: ERROR nova.compute.manager [instance: 27d12301-a049-4d1e-b171-a09a642703fb] [ 657.315612] env[62096]: DEBUG nova.compute.utils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: 27d12301-a049-4d1e-b171-a09a642703fb] Binding failed for port bdb9b731-7251-4c30-a6e1-466fc3302e95, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 657.316325] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26459ebf-b381-41d4-be25-41ac2d4ca3d4 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.319147] env[62096]: DEBUG nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: 27d12301-a049-4d1e-b171-a09a642703fb] Build of instance 27d12301-a049-4d1e-b171-a09a642703fb was re-scheduled: Binding failed for port bdb9b731-7251-4c30-a6e1-466fc3302e95, please check neutron logs for more information. {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 657.319560] env[62096]: DEBUG nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: 27d12301-a049-4d1e-b171-a09a642703fb] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 657.319800] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Acquiring lock "refresh_cache-27d12301-a049-4d1e-b171-a09a642703fb" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 657.319950] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Acquired lock "refresh_cache-27d12301-a049-4d1e-b171-a09a642703fb" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 657.320133] env[62096]: DEBUG nova.network.neutron [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: 27d12301-a049-4d1e-b171-a09a642703fb] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 657.320599] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg 4e4c413b8b2947168b9942e806153220 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 657.321837] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 18.740s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 657.323115] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg 48e4ac13f7814d18875b1811999bc55f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 657.338898] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4e4c413b8b2947168b9942e806153220 [ 657.348224] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31c3702d-9fdf-48d4-97f0-9a0e94df6d67 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.376265] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 48e4ac13f7814d18875b1811999bc55f [ 657.797472] env[62096]: DEBUG nova.compute.manager [None req-124d1758-0c44-4285-80be-0bcbeb3a4e49 tempest-ServerDiagnosticsV248Test-489447487 tempest-ServerDiagnosticsV248Test-489447487-project-admin] [instance: 06b9105a-8dcb-4bff-bba2-05e179036f24] Checking state {{(pid=62096) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 657.798583] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-101bd7c3-6f72-4f26-b43a-c8ce27a89957 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.806461] env[62096]: INFO nova.compute.manager [None req-124d1758-0c44-4285-80be-0bcbeb3a4e49 tempest-ServerDiagnosticsV248Test-489447487 tempest-ServerDiagnosticsV248Test-489447487-project-admin] [instance: 06b9105a-8dcb-4bff-bba2-05e179036f24] Retrieving diagnostics [ 657.807220] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90bea4ee-2ba6-4b1a-926d-605a14fac6d9 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.859424] env[62096]: DEBUG nova.network.neutron [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: 27d12301-a049-4d1e-b171-a09a642703fb] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 657.962237] env[62096]: DEBUG nova.network.neutron [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: 27d12301-a049-4d1e-b171-a09a642703fb] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 657.962764] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg 2b5a74d8aeb749dfbf60daf9ce1e2090 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 657.973369] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2b5a74d8aeb749dfbf60daf9ce1e2090 [ 658.139493] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Acquiring lock "e8631ec4-1823-46d2-8553-05e3336fed32" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 658.140943] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Lock "e8631ec4-1823-46d2-8553-05e3336fed32" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 658.309519] env[62096]: ERROR nova.compute.manager [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port a72a7432-c15c-4535-bfe3-9b96b41ec2ce, please check neutron logs for more information. 
[ 658.309519] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 658.309519] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 658.309519] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 658.309519] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 658.309519] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 658.309519] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 658.309519] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 658.309519] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 658.309519] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 658.309519] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 658.309519] env[62096]: ERROR nova.compute.manager raise self.value [ 658.309519] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 658.309519] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 658.309519] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 658.309519] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 658.309962] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 658.309962] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 658.309962] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port a72a7432-c15c-4535-bfe3-9b96b41ec2ce, please check neutron logs for more information. 
[ 658.309962] env[62096]: ERROR nova.compute.manager [ 658.309962] env[62096]: Traceback (most recent call last): [ 658.309962] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 658.309962] env[62096]: listener.cb(fileno) [ 658.309962] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 658.309962] env[62096]: result = function(*args, **kwargs) [ 658.309962] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 658.309962] env[62096]: return func(*args, **kwargs) [ 658.309962] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 658.309962] env[62096]: raise e [ 658.309962] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 658.309962] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 658.309962] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 658.309962] env[62096]: created_port_ids = self._update_ports_for_instance( [ 658.309962] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 658.309962] env[62096]: with excutils.save_and_reraise_exception(): [ 658.309962] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 658.309962] env[62096]: self.force_reraise() [ 658.309962] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 658.309962] env[62096]: raise self.value [ 658.309962] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 658.309962] env[62096]: updated_port = self._update_port( [ 658.309962] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 658.309962] env[62096]: _ensure_no_port_binding_failure(port) [ 658.309962] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 658.309962] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 658.310769] env[62096]: nova.exception.PortBindingFailed: Binding failed for port a72a7432-c15c-4535-bfe3-9b96b41ec2ce, please check neutron logs for more information. [ 658.310769] env[62096]: Removing descriptor: 16 [ 658.310769] env[62096]: ERROR nova.compute.manager [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] [instance: 62b778aa-71c7-480b-8148-017773246caf] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port a72a7432-c15c-4535-bfe3-9b96b41ec2ce, please check neutron logs for more information. 
[ 658.310769] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] Traceback (most recent call last): [ 658.310769] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 658.310769] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] yield resources [ 658.310769] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 658.310769] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] self.driver.spawn(context, instance, image_meta, [ 658.310769] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 658.310769] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 658.310769] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 658.310769] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] vm_ref = self.build_virtual_machine(instance, [ 658.311082] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 658.311082] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] vif_infos = vmwarevif.get_vif_info(self._session, [ 658.311082] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 658.311082] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] for vif in network_info: [ 658.311082] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 658.311082] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] return self._sync_wrapper(fn, *args, **kwargs) [ 658.311082] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 658.311082] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] self.wait() [ 658.311082] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 658.311082] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] self[:] = self._gt.wait() [ 658.311082] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 658.311082] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] return self._exit_event.wait() [ 658.311082] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 658.311408] env[62096]: ERROR 
nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] result = hub.switch() [ 658.311408] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 658.311408] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] return self.greenlet.switch() [ 658.311408] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 658.311408] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] result = function(*args, **kwargs) [ 658.311408] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 658.311408] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] return func(*args, **kwargs) [ 658.311408] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 658.311408] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] raise e [ 658.311408] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 658.311408] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] nwinfo = self.network_api.allocate_for_instance( [ 658.311408] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 658.311408] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] created_port_ids = self._update_ports_for_instance( [ 658.311729] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 658.311729] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] with excutils.save_and_reraise_exception(): [ 658.311729] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 658.311729] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] self.force_reraise() [ 658.311729] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 658.311729] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] raise self.value [ 658.311729] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 658.311729] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] updated_port = self._update_port( [ 658.311729] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 658.311729] 
env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] _ensure_no_port_binding_failure(port) [ 658.311729] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 658.311729] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] raise exception.PortBindingFailed(port_id=port['id']) [ 658.312076] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] nova.exception.PortBindingFailed: Binding failed for port a72a7432-c15c-4535-bfe3-9b96b41ec2ce, please check neutron logs for more information. [ 658.312076] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] [ 658.312076] env[62096]: INFO nova.compute.manager [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] [instance: 62b778aa-71c7-480b-8148-017773246caf] Terminating instance [ 658.313208] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Acquiring lock "refresh_cache-62b778aa-71c7-480b-8148-017773246caf" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 658.313208] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Acquired lock "refresh_cache-62b778aa-71c7-480b-8148-017773246caf" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 658.313208] env[62096]: DEBUG nova.network.neutron [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] [instance: 62b778aa-71c7-480b-8148-017773246caf] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 658.313934] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Expecting reply to msg 873c1463f04847d7b8af332236658e36 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 658.320485] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 873c1463f04847d7b8af332236658e36 [ 658.374492] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9567348-fe91-40cc-82a1-54b0f55f0c4b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.382551] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ff0ac53-22d1-469d-9ed0-121da210a5c2 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.417396] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c75848a4-1e3c-461a-8f92-a3996dd94e49 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.425312] env[62096]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d6db05a-45ce-40bc-a0ba-5f6a45d95a41 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.440483] env[62096]: DEBUG nova.compute.provider_tree [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 658.440986] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg 7cebb65332df4377bfd13351fb4b1fc5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 658.450687] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7cebb65332df4377bfd13351fb4b1fc5 [ 658.465565] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Releasing lock "refresh_cache-27d12301-a049-4d1e-b171-a09a642703fb" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 658.465799] env[62096]: DEBUG nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 658.465990] env[62096]: DEBUG nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: 27d12301-a049-4d1e-b171-a09a642703fb] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 658.466157] env[62096]: DEBUG nova.network.neutron [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: 27d12301-a049-4d1e-b171-a09a642703fb] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 658.493495] env[62096]: DEBUG nova.network.neutron [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: 27d12301-a049-4d1e-b171-a09a642703fb] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 658.494181] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg 975c6fe70bd54b78b06bc6ccb3849f4d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 658.501076] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 975c6fe70bd54b78b06bc6ccb3849f4d [ 658.559769] env[62096]: DEBUG nova.compute.manager [req-d1eaadae-5cc8-4cca-885b-9a9b2334def3 req-38ab5a7c-a8a4-4ce9-8f92-10086bec9b98 service nova] [instance: 62b778aa-71c7-480b-8148-017773246caf] Received event network-changed-a72a7432-c15c-4535-bfe3-9b96b41ec2ce {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 658.559971] env[62096]: DEBUG nova.compute.manager [req-d1eaadae-5cc8-4cca-885b-9a9b2334def3 req-38ab5a7c-a8a4-4ce9-8f92-10086bec9b98 service nova] [instance: 62b778aa-71c7-480b-8148-017773246caf] Refreshing instance network info cache due to event network-changed-a72a7432-c15c-4535-bfe3-9b96b41ec2ce. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 658.560226] env[62096]: DEBUG oslo_concurrency.lockutils [req-d1eaadae-5cc8-4cca-885b-9a9b2334def3 req-38ab5a7c-a8a4-4ce9-8f92-10086bec9b98 service nova] Acquiring lock "refresh_cache-62b778aa-71c7-480b-8148-017773246caf" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 658.685982] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-70f080d3-692d-46fb-a741-1ba8c3b3362d tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Expecting reply to msg 39621a2709404d2994ebf44d02156688 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 658.697176] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 39621a2709404d2994ebf44d02156688 [ 658.847362] env[62096]: DEBUG nova.network.neutron [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] [instance: 62b778aa-71c7-480b-8148-017773246caf] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 658.951811] env[62096]: DEBUG nova.scheduler.client.report [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 658.954315] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg baca952ade0a456ca61e2278c84c83e0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 658.968406] env[62096]: DEBUG nova.network.neutron [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] [instance: 62b778aa-71c7-480b-8148-017773246caf] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 658.968406] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Expecting reply to msg 1a0aa83417494cb39a9a0ed423880696 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 658.980125] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1a0aa83417494cb39a9a0ed423880696 [ 658.980699] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg baca952ade0a456ca61e2278c84c83e0 [ 658.997420] env[62096]: DEBUG nova.network.neutron [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: 27d12301-a049-4d1e-b171-a09a642703fb] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 658.997420] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg a56fe47dd42e463db3381c172e0afd3a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 659.005180] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a56fe47dd42e463db3381c172e0afd3a [ 659.191096] env[62096]: DEBUG oslo_concurrency.lockutils [None req-70f080d3-692d-46fb-a741-1ba8c3b3362d tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Acquiring lock "06b9105a-8dcb-4bff-bba2-05e179036f24" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 659.191096] env[62096]: DEBUG oslo_concurrency.lockutils [None 
req-70f080d3-692d-46fb-a741-1ba8c3b3362d tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Lock "06b9105a-8dcb-4bff-bba2-05e179036f24" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 659.191096] env[62096]: DEBUG oslo_concurrency.lockutils [None req-70f080d3-692d-46fb-a741-1ba8c3b3362d tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Acquiring lock "06b9105a-8dcb-4bff-bba2-05e179036f24-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 659.191096] env[62096]: DEBUG oslo_concurrency.lockutils [None req-70f080d3-692d-46fb-a741-1ba8c3b3362d tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Lock "06b9105a-8dcb-4bff-bba2-05e179036f24-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 659.191355] env[62096]: DEBUG oslo_concurrency.lockutils [None req-70f080d3-692d-46fb-a741-1ba8c3b3362d tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Lock "06b9105a-8dcb-4bff-bba2-05e179036f24-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 659.191355] env[62096]: INFO nova.compute.manager [None req-70f080d3-692d-46fb-a741-1ba8c3b3362d tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] [instance: 06b9105a-8dcb-4bff-bba2-05e179036f24] Terminating instance [ 659.193187] env[62096]: DEBUG oslo_concurrency.lockutils [None req-70f080d3-692d-46fb-a741-1ba8c3b3362d tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Acquiring lock "refresh_cache-06b9105a-8dcb-4bff-bba2-05e179036f24" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 659.193187] env[62096]: DEBUG oslo_concurrency.lockutils [None req-70f080d3-692d-46fb-a741-1ba8c3b3362d tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Acquired lock "refresh_cache-06b9105a-8dcb-4bff-bba2-05e179036f24" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.193187] env[62096]: DEBUG nova.network.neutron [None req-70f080d3-692d-46fb-a741-1ba8c3b3362d tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] [instance: 06b9105a-8dcb-4bff-bba2-05e179036f24] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 659.193513] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-70f080d3-692d-46fb-a741-1ba8c3b3362d tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Expecting reply to msg 7217a5d6a4e94b3ab63aed4fde060160 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 
659.199885] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7217a5d6a4e94b3ab63aed4fde060160 [ 659.457599] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.136s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 659.458280] env[62096]: ERROR nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 285d5c29-d148-4abd-8630-12f7a9c6abf5, please check neutron logs for more information. [ 659.458280] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] Traceback (most recent call last): [ 659.458280] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 659.458280] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] self.driver.spawn(context, instance, image_meta, [ 659.458280] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 659.458280] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 659.458280] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 659.458280] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] vm_ref = self.build_virtual_machine(instance, [ 659.458280] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 659.458280] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] vif_infos = vmwarevif.get_vif_info(self._session, [ 659.458280] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 659.458600] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] for vif in network_info: [ 659.458600] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 659.458600] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] return self._sync_wrapper(fn, *args, **kwargs) [ 659.458600] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 659.458600] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] self.wait() [ 659.458600] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] File "/opt/stack/nova/nova/network/model.py", 
line 635, in wait [ 659.458600] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] self[:] = self._gt.wait() [ 659.458600] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 659.458600] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] return self._exit_event.wait() [ 659.458600] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 659.458600] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] result = hub.switch() [ 659.458600] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 659.458600] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] return self.greenlet.switch() [ 659.458947] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 659.458947] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] result = function(*args, **kwargs) [ 659.458947] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 659.458947] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] return func(*args, **kwargs) [ 659.458947] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 659.458947] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] raise e [ 659.458947] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 659.458947] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] nwinfo = self.network_api.allocate_for_instance( [ 659.458947] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 659.458947] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] created_port_ids = self._update_ports_for_instance( [ 659.458947] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 659.458947] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] with excutils.save_and_reraise_exception(): [ 659.458947] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 659.459292] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] self.force_reraise() [ 659.459292] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 659.459292] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] raise self.value [ 659.459292] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 659.459292] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] updated_port = self._update_port( [ 659.459292] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 659.459292] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] _ensure_no_port_binding_failure(port) [ 659.459292] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 659.459292] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] raise exception.PortBindingFailed(port_id=port['id']) [ 659.459292] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] nova.exception.PortBindingFailed: Binding failed for port 285d5c29-d148-4abd-8630-12f7a9c6abf5, please check neutron logs for more information. [ 659.459292] env[62096]: ERROR nova.compute.manager [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] [ 659.459590] env[62096]: DEBUG nova.compute.utils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] Binding failed for port 285d5c29-d148-4abd-8630-12f7a9c6abf5, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 659.460226] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 18.557s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 659.462420] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg 1af758a40bb845a895c4d3f76973f3cc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 659.464063] env[62096]: DEBUG nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] Build of instance d9562762-52de-4a0c-b6a2-2aeaa20e47a0 was re-scheduled: Binding failed for port 285d5c29-d148-4abd-8630-12f7a9c6abf5, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 659.464511] env[62096]: DEBUG nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 659.464730] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Acquiring lock "refresh_cache-d9562762-52de-4a0c-b6a2-2aeaa20e47a0" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 659.464929] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Acquired lock "refresh_cache-d9562762-52de-4a0c-b6a2-2aeaa20e47a0" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.465114] env[62096]: DEBUG nova.network.neutron [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 659.465479] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg b069a618e431494e9fe49e132580c24c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 659.471504] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Releasing lock "refresh_cache-62b778aa-71c7-480b-8148-017773246caf" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 659.471504] env[62096]: DEBUG nova.compute.manager [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] [instance: 62b778aa-71c7-480b-8148-017773246caf] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 659.471504] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] [instance: 62b778aa-71c7-480b-8148-017773246caf] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 659.471504] env[62096]: DEBUG oslo_concurrency.lockutils [req-d1eaadae-5cc8-4cca-885b-9a9b2334def3 req-38ab5a7c-a8a4-4ce9-8f92-10086bec9b98 service nova] Acquired lock "refresh_cache-62b778aa-71c7-480b-8148-017773246caf" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.471504] env[62096]: DEBUG nova.network.neutron [req-d1eaadae-5cc8-4cca-885b-9a9b2334def3 req-38ab5a7c-a8a4-4ce9-8f92-10086bec9b98 service nova] [instance: 62b778aa-71c7-480b-8148-017773246caf] Refreshing network info cache for port a72a7432-c15c-4535-bfe3-9b96b41ec2ce {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 659.471670] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-d1eaadae-5cc8-4cca-885b-9a9b2334def3 req-38ab5a7c-a8a4-4ce9-8f92-10086bec9b98 service nova] Expecting reply to msg f24b42805ec84d5c99c4c78ae5b4d5c6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 659.471670] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d9d14226-8020-46d2-8eb6-b0d7273d41c1 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.474736] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b069a618e431494e9fe49e132580c24c [ 659.483200] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a15f222-b28c-4bab-91b9-453fdb1f101a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.496517] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f24b42805ec84d5c99c4c78ae5b4d5c6 [ 659.498950] env[62096]: INFO nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: 27d12301-a049-4d1e-b171-a09a642703fb] Took 1.03 seconds to deallocate network for instance. [ 659.500592] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg 7f365ae937fa4bd583c4dd65f39689a6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 659.508384] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] [instance: 62b778aa-71c7-480b-8148-017773246caf] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 62b778aa-71c7-480b-8148-017773246caf could not be found. 
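The traceback above ends in nova.exception.PortBindingFailed raised from _ensure_no_port_binding_failure() (nova/network/neutron.py, line 294 in this tree). A minimal, self-contained sketch of that check follows; it is an illustration rather than the Nova source, and it assumes the usual Neutron convention that a failed binding is reported on the port as binding:vif_type = 'binding_failed'. The exception class here is a stand-in for nova.exception.PortBindingFailed.

    # Illustration of the check behind the PortBindingFailed traceback above.
    # Assumption: Neutron marks a failed binding by setting the port's
    # 'binding:vif_type' attribute to 'binding_failed'; PortBindingFailed
    # below is a stand-in for nova.exception.PortBindingFailed.

    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")
            self.port_id = port_id

    def ensure_no_port_binding_failure(port):
        """Raise if Neutron reported that binding this port failed."""
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # The port from the log above, as Neutron would return it after a failed bind.
    failed_port = {'id': 'a72a7432-c15c-4535-bfe3-9b96b41ec2ce',
                   'binding:vif_type': 'binding_failed'}
    try:
        ensure_no_port_binding_failure(failed_port)
    except PortBindingFailed as exc:
        print(exc)   # same message as logged by nova.compute.manager

When this exception reaches _build_and_run_instance, the compute manager aborts the resource claim, deallocates the (empty) network info seen in the cache updates above, and either terminates the instance or re-schedules the build, which is the sequence the surrounding entries record.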
[ 659.509231] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] [instance: 62b778aa-71c7-480b-8148-017773246caf] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 659.509231] env[62096]: INFO nova.compute.manager [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] [instance: 62b778aa-71c7-480b-8148-017773246caf] Took 0.04 seconds to destroy the instance on the hypervisor. [ 659.509231] env[62096]: DEBUG oslo.service.loopingcall [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 659.509231] env[62096]: DEBUG nova.compute.manager [-] [instance: 62b778aa-71c7-480b-8148-017773246caf] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 659.509432] env[62096]: DEBUG nova.network.neutron [-] [instance: 62b778aa-71c7-480b-8148-017773246caf] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 659.511219] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1af758a40bb845a895c4d3f76973f3cc [ 659.537091] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7f365ae937fa4bd583c4dd65f39689a6 [ 659.541527] env[62096]: DEBUG nova.network.neutron [-] [instance: 62b778aa-71c7-480b-8148-017773246caf] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 659.542026] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 9efb63b93e6346ad86dd90673b76094f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 659.548461] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9efb63b93e6346ad86dd90673b76094f [ 659.723377] env[62096]: DEBUG nova.network.neutron [None req-70f080d3-692d-46fb-a741-1ba8c3b3362d tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] [instance: 06b9105a-8dcb-4bff-bba2-05e179036f24] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 659.957776] env[62096]: DEBUG nova.network.neutron [None req-70f080d3-692d-46fb-a741-1ba8c3b3362d tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] [instance: 06b9105a-8dcb-4bff-bba2-05e179036f24] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 659.958322] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-70f080d3-692d-46fb-a741-1ba8c3b3362d tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Expecting reply to msg 616209e19e394fd4a680c281eef8959a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 659.971665] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 616209e19e394fd4a680c281eef8959a [ 659.985605] env[62096]: DEBUG nova.network.neutron [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 660.005871] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg ab4376df2026431f8ce29ba9ec8e73f2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 660.008893] env[62096]: DEBUG nova.network.neutron [req-d1eaadae-5cc8-4cca-885b-9a9b2334def3 req-38ab5a7c-a8a4-4ce9-8f92-10086bec9b98 service nova] [instance: 62b778aa-71c7-480b-8148-017773246caf] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 660.042169] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ab4376df2026431f8ce29ba9ec8e73f2 [ 660.043776] env[62096]: DEBUG nova.network.neutron [-] [instance: 62b778aa-71c7-480b-8148-017773246caf] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 660.044262] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c1b4a9d629604b22a8ec0fc01429d96e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 660.057144] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c1b4a9d629604b22a8ec0fc01429d96e [ 660.087710] env[62096]: DEBUG nova.network.neutron [req-d1eaadae-5cc8-4cca-885b-9a9b2334def3 req-38ab5a7c-a8a4-4ce9-8f92-10086bec9b98 service nova] [instance: 62b778aa-71c7-480b-8148-017773246caf] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 660.088270] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-d1eaadae-5cc8-4cca-885b-9a9b2334def3 req-38ab5a7c-a8a4-4ce9-8f92-10086bec9b98 service nova] Expecting reply to msg 0acc652de6bc4539abe023e57e969d99 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 660.096792] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0acc652de6bc4539abe023e57e969d99 [ 660.105722] env[62096]: DEBUG nova.network.neutron [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 660.106227] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg cbdc5f39d69342ed8321db5a1fed0602 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 660.114527] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cbdc5f39d69342ed8321db5a1fed0602 [ 660.404861] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d086503-b212-4ee4-ad1d-87c0352a9b46 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.412798] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d229af73-1a15-4195-868d-e369c7e487f4 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.461586] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2e4e600-e148-4884-9909-ee3a2564df48 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.463443] env[62096]: DEBUG oslo_concurrency.lockutils [None req-70f080d3-692d-46fb-a741-1ba8c3b3362d tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Releasing lock "refresh_cache-06b9105a-8dcb-4bff-bba2-05e179036f24" {{(pid=62096) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 660.463856] env[62096]: DEBUG nova.compute.manager [None req-70f080d3-692d-46fb-a741-1ba8c3b3362d tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] [instance: 06b9105a-8dcb-4bff-bba2-05e179036f24] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 660.464086] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-70f080d3-692d-46fb-a741-1ba8c3b3362d tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] [instance: 06b9105a-8dcb-4bff-bba2-05e179036f24] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 660.464821] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17ffef9a-cf15-45f2-83ef-42fc4daf671e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.473999] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7c2f548-12f7-4e52-b7c9-4cdcc2fc3743 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.477763] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-70f080d3-692d-46fb-a741-1ba8c3b3362d tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] [instance: 06b9105a-8dcb-4bff-bba2-05e179036f24] Powering off the VM {{(pid=62096) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 660.478079] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b083581d-63cc-40b3-a43d-93207b712da5 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.490673] env[62096]: DEBUG nova.compute.provider_tree [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 660.491213] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg bb0ddfc0158b41bd9fc1ecc1a64fc605 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 660.493229] env[62096]: DEBUG oslo_vmware.api [None req-70f080d3-692d-46fb-a741-1ba8c3b3362d tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Waiting for the task: (returnval){ [ 660.493229] env[62096]: value = "task-397387" [ 660.493229] env[62096]: _type = "Task" [ 660.493229] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.501136] env[62096]: DEBUG oslo_vmware.api [None req-70f080d3-692d-46fb-a741-1ba8c3b3362d tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Task: {'id': task-397387, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.501700] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bb0ddfc0158b41bd9fc1ecc1a64fc605 [ 660.534072] env[62096]: INFO nova.scheduler.client.report [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Deleted allocations for instance 27d12301-a049-4d1e-b171-a09a642703fb [ 660.541608] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg 79fe2f5d9fd14d2f94780a2531b3b0e7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 660.547442] env[62096]: INFO nova.compute.manager [-] [instance: 62b778aa-71c7-480b-8148-017773246caf] Took 1.04 seconds to deallocate network for instance. [ 660.550194] env[62096]: DEBUG nova.compute.claims [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] [instance: 62b778aa-71c7-480b-8148-017773246caf] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 660.550867] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 660.570767] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 79fe2f5d9fd14d2f94780a2531b3b0e7 [ 660.593857] env[62096]: DEBUG oslo_concurrency.lockutils [req-d1eaadae-5cc8-4cca-885b-9a9b2334def3 req-38ab5a7c-a8a4-4ce9-8f92-10086bec9b98 service nova] Releasing lock "refresh_cache-62b778aa-71c7-480b-8148-017773246caf" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 660.612863] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Releasing lock "refresh_cache-d9562762-52de-4a0c-b6a2-2aeaa20e47a0" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 660.613591] env[62096]: DEBUG nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 660.613989] env[62096]: DEBUG nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 660.614286] env[62096]: DEBUG nova.network.neutron [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 660.641128] env[62096]: DEBUG nova.network.neutron [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 660.641989] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg 8b3f9252f29b40dbb0f06352b12198ba in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 660.652822] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8b3f9252f29b40dbb0f06352b12198ba [ 660.680619] env[62096]: DEBUG oslo_concurrency.lockutils [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Acquiring lock "f050f0de-dc84-4825-b490-eafe522354cd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 660.680884] env[62096]: DEBUG oslo_concurrency.lockutils [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Lock "f050f0de-dc84-4825-b490-eafe522354cd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 660.730071] env[62096]: DEBUG nova.compute.manager [req-59faac41-a896-47a0-aedb-a1fb13066812 req-7ba22327-5c8f-49ec-9e4d-62a8bb065185 service nova] [instance: 62b778aa-71c7-480b-8148-017773246caf] Received event network-vif-deleted-a72a7432-c15c-4535-bfe3-9b96b41ec2ce {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 660.995537] env[62096]: DEBUG nova.scheduler.client.report [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': 
{'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 660.998007] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg 1d86407ec24e40f3a9a7cea2c0ee83ba in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 661.008996] env[62096]: DEBUG oslo_vmware.api [None req-70f080d3-692d-46fb-a741-1ba8c3b3362d tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Task: {'id': task-397387, 'name': PowerOffVM_Task, 'duration_secs': 0.123358} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.009268] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-70f080d3-692d-46fb-a741-1ba8c3b3362d tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] [instance: 06b9105a-8dcb-4bff-bba2-05e179036f24] Powered off the VM {{(pid=62096) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 661.009450] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-70f080d3-692d-46fb-a741-1ba8c3b3362d tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] [instance: 06b9105a-8dcb-4bff-bba2-05e179036f24] Unregistering the VM {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 661.009691] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2f769e7c-0115-4a11-9053-2cec6a72f524 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.011489] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1d86407ec24e40f3a9a7cea2c0ee83ba [ 661.035811] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-70f080d3-692d-46fb-a741-1ba8c3b3362d tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] [instance: 06b9105a-8dcb-4bff-bba2-05e179036f24] Unregistered the VM {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 661.036973] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-70f080d3-692d-46fb-a741-1ba8c3b3362d tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] [instance: 06b9105a-8dcb-4bff-bba2-05e179036f24] Deleting contents of the VM from datastore datastore2 {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 661.036973] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-70f080d3-692d-46fb-a741-1ba8c3b3362d tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Deleting the datastore file [datastore2] 06b9105a-8dcb-4bff-bba2-05e179036f24 {{(pid=62096) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 661.036973] env[62096]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ee1ad17c-b6a7-45cc-b2fd-aaf94bf5913a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.044306] env[62096]: DEBUG oslo_vmware.api [None req-70f080d3-692d-46fb-a741-1ba8c3b3362d 
tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Waiting for the task: (returnval){ [ 661.044306] env[62096]: value = "task-397389" [ 661.044306] env[62096]: _type = "Task" [ 661.044306] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.046288] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Lock "27d12301-a049-4d1e-b171-a09a642703fb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 89.320s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 661.046591] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Expecting reply to msg 2e5015ea49764746a59dcfe77bd287f8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 661.053159] env[62096]: DEBUG oslo_vmware.api [None req-70f080d3-692d-46fb-a741-1ba8c3b3362d tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Task: {'id': task-397389, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.057472] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2e5015ea49764746a59dcfe77bd287f8 [ 661.144376] env[62096]: DEBUG nova.network.neutron [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 661.144921] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg d74e559478ce4757aae216d70db0f375 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 661.163165] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d74e559478ce4757aae216d70db0f375 [ 661.504988] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.045s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 661.506249] env[62096]: ERROR nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: bbed3eed-f511-4b9e-9632-74841df01592] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 25b221b6-400a-463b-902b-13eb0822967e, please check neutron logs for more information. 
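The lockutils entries in this stretch report two timings per lock: how long the caller waited before acquiring it and how long it was then held (for example, "compute_resources" waited 18.557s and was held 2.045s by abort_instance_claim). The sketch below shows one way such waited/held figures can be produced; it is not the oslo.concurrency implementation, just a context manager written under the assumption that timestamps are taken immediately before the acquire, after it succeeds, and at release. The name timed_lock() is made up for the example.

    # Stand-alone illustration of the waited/held timings printed by the
    # lockutils entries above. Not oslo.concurrency; timed_lock() is hypothetical.

    import threading
    import time
    from contextlib import contextmanager

    _locks = {}                        # one shared threading.Lock per lock name
    _registry_guard = threading.Lock()

    @contextmanager
    def timed_lock(name, by):
        with _registry_guard:
            lock = _locks.setdefault(name, threading.Lock())
        t0 = time.monotonic()
        lock.acquire()                          # "waited" = time blocked here
        acquired = time.monotonic()
        print(f'Lock "{name}" acquired by "{by}" :: waited {acquired - t0:.3f}s')
        try:
            yield
        finally:
            held = time.monotonic() - acquired  # "held" = acquire -> release
            lock.release()
            print(f'Lock "{name}" released by "{by}" :: held {held:.3f}s')

    # Usage mirroring the resource-tracker entries in the log:
    with timed_lock("compute_resources", "ResourceTracker.abort_instance_claim"):
        time.sleep(0.1)   # stand-in for the claim cleanup work

This matches the pattern visible above, where individual hold times on "compute_resources" stay around two seconds while the wait times grow as builds queue behind one another.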
[ 661.506249] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] Traceback (most recent call last): [ 661.506249] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 661.506249] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] self.driver.spawn(context, instance, image_meta, [ 661.506249] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 661.506249] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] self._vmops.spawn(context, instance, image_meta, injected_files, [ 661.506249] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 661.506249] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] vm_ref = self.build_virtual_machine(instance, [ 661.506249] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 661.506249] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] vif_infos = vmwarevif.get_vif_info(self._session, [ 661.506249] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 661.506665] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] for vif in network_info: [ 661.506665] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 661.506665] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] return self._sync_wrapper(fn, *args, **kwargs) [ 661.506665] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 661.506665] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] self.wait() [ 661.506665] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 661.506665] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] self[:] = self._gt.wait() [ 661.506665] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 661.506665] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] return self._exit_event.wait() [ 661.506665] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 661.506665] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] current.throw(*self._exc) [ 661.506665] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
661.506665] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] result = function(*args, **kwargs) [ 661.507071] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 661.507071] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] return func(*args, **kwargs) [ 661.507071] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 661.507071] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] raise e [ 661.507071] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 661.507071] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] nwinfo = self.network_api.allocate_for_instance( [ 661.507071] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 661.507071] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] created_port_ids = self._update_ports_for_instance( [ 661.507071] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 661.507071] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] with excutils.save_and_reraise_exception(): [ 661.507071] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 661.507071] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] self.force_reraise() [ 661.507071] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 661.507469] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] raise self.value [ 661.507469] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 661.507469] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] updated_port = self._update_port( [ 661.507469] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 661.507469] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] _ensure_no_port_binding_failure(port) [ 661.507469] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 661.507469] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] raise exception.PortBindingFailed(port_id=port['id']) [ 661.507469] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] nova.exception.PortBindingFailed: Binding failed for 
port 25b221b6-400a-463b-902b-13eb0822967e, please check neutron logs for more information. [ 661.507469] env[62096]: ERROR nova.compute.manager [instance: bbed3eed-f511-4b9e-9632-74841df01592] [ 661.507469] env[62096]: DEBUG nova.compute.utils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: bbed3eed-f511-4b9e-9632-74841df01592] Binding failed for port 25b221b6-400a-463b-902b-13eb0822967e, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 661.507857] env[62096]: DEBUG oslo_concurrency.lockutils [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.546s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 661.509480] env[62096]: INFO nova.compute.claims [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 661.511492] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Expecting reply to msg d0abd48103844189b18ec9a331463612 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 661.512629] env[62096]: DEBUG nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: bbed3eed-f511-4b9e-9632-74841df01592] Build of instance bbed3eed-f511-4b9e-9632-74841df01592 was re-scheduled: Binding failed for port 25b221b6-400a-463b-902b-13eb0822967e, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 661.513055] env[62096]: DEBUG nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: bbed3eed-f511-4b9e-9632-74841df01592] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 661.513279] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Acquiring lock "refresh_cache-bbed3eed-f511-4b9e-9632-74841df01592" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 661.513424] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Acquired lock "refresh_cache-bbed3eed-f511-4b9e-9632-74841df01592" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.513579] env[62096]: DEBUG nova.network.neutron [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: bbed3eed-f511-4b9e-9632-74841df01592] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 661.513958] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg 360b6c6a287d4e069b27e76b06e7a856 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 661.521884] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 360b6c6a287d4e069b27e76b06e7a856 [ 661.548601] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d0abd48103844189b18ec9a331463612 [ 661.549154] env[62096]: DEBUG nova.compute.manager [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 661.550859] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Expecting reply to msg ae8e77973bc743eebdda84d217616dd8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 661.555869] env[62096]: DEBUG oslo_vmware.api [None req-70f080d3-692d-46fb-a741-1ba8c3b3362d tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Task: {'id': task-397389, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.082999} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.556165] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-70f080d3-692d-46fb-a741-1ba8c3b3362d tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Deleted the datastore file {{(pid=62096) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 661.556353] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-70f080d3-692d-46fb-a741-1ba8c3b3362d tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] [instance: 06b9105a-8dcb-4bff-bba2-05e179036f24] Deleted contents of the VM from datastore datastore2 {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 661.556523] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-70f080d3-692d-46fb-a741-1ba8c3b3362d tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] [instance: 06b9105a-8dcb-4bff-bba2-05e179036f24] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 661.556682] env[62096]: INFO nova.compute.manager [None req-70f080d3-692d-46fb-a741-1ba8c3b3362d tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] [instance: 06b9105a-8dcb-4bff-bba2-05e179036f24] Took 1.09 seconds to destroy the instance on the hypervisor. [ 661.556911] env[62096]: DEBUG oslo.service.loopingcall [None req-70f080d3-692d-46fb-a741-1ba8c3b3362d tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 661.557087] env[62096]: DEBUG nova.compute.manager [-] [instance: 06b9105a-8dcb-4bff-bba2-05e179036f24] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 661.557173] env[62096]: DEBUG nova.network.neutron [-] [instance: 06b9105a-8dcb-4bff-bba2-05e179036f24] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 661.579168] env[62096]: DEBUG nova.network.neutron [-] [instance: 06b9105a-8dcb-4bff-bba2-05e179036f24] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 661.579168] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg d648c1119bf84b549211de8c3f232a2e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 661.584763] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d648c1119bf84b549211de8c3f232a2e [ 661.589809] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ae8e77973bc743eebdda84d217616dd8 [ 661.647651] env[62096]: INFO nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: d9562762-52de-4a0c-b6a2-2aeaa20e47a0] Took 1.03 seconds to deallocate network for instance. 
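The traceback above bottoms out in _ensure_no_port_binding_failure(), which raises PortBindingFailed for port 25b221b6-400a-463b-902b-13eb0822967e, after which the build is re-scheduled. A rough sketch of that check follows, assuming the usual Neutron convention that a port whose binding could not be completed carries binding:vif_type = 'binding_failed'; apart from the error message, which matches the log, the names here are simplified stand-ins rather than Nova's actual code.

# Illustrative sketch of the check the traceback above ends in. The exception
# message matches the log; the helper itself is a simplified stand-in.
class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs for more "
            "information." % port_id)

def ensure_no_port_binding_failure(port):
    # Neutron marks a port it could not bind with the vif_type 'binding_failed'.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

# Example with a port shaped like the one in the log:
try:
    ensure_no_port_binding_failure(
        {'id': '25b221b6-400a-463b-902b-13eb0822967e',
         'binding:vif_type': 'binding_failed'})
except PortBindingFailed as exc:
    print(exc)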
[ 661.649375] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg 89e5e51647ec4e49b86c723416076142 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 661.687613] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 89e5e51647ec4e49b86c723416076142 [ 662.020365] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Expecting reply to msg a4377783af064136a1413490195b9776 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 662.024466] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a4377783af064136a1413490195b9776 [ 662.032763] env[62096]: DEBUG nova.network.neutron [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: bbed3eed-f511-4b9e-9632-74841df01592] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 662.066508] env[62096]: DEBUG oslo_concurrency.lockutils [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 662.079987] env[62096]: DEBUG nova.network.neutron [-] [instance: 06b9105a-8dcb-4bff-bba2-05e179036f24] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 662.080462] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg d19f0df17d3345b6a9e24c2b53ea3940 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 662.091531] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d19f0df17d3345b6a9e24c2b53ea3940 [ 662.129754] env[62096]: DEBUG nova.network.neutron [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: bbed3eed-f511-4b9e-9632-74841df01592] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 662.130279] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg 6ec678eb8eb64f12a38c8af0962d9fe7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 662.140297] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6ec678eb8eb64f12a38c8af0962d9fe7 [ 662.153479] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg 6fb5848184e241a190b141a7f1a636da in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 662.191604] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
6fb5848184e241a190b141a7f1a636da [ 662.582846] env[62096]: INFO nova.compute.manager [-] [instance: 06b9105a-8dcb-4bff-bba2-05e179036f24] Took 1.03 seconds to deallocate network for instance. [ 662.588310] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-70f080d3-692d-46fb-a741-1ba8c3b3362d tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Expecting reply to msg 457b3706289843c1846786e850c1cc67 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 662.616498] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 457b3706289843c1846786e850c1cc67 [ 662.632815] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Releasing lock "refresh_cache-bbed3eed-f511-4b9e-9632-74841df01592" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 662.633034] env[62096]: DEBUG nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 662.633207] env[62096]: DEBUG nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: bbed3eed-f511-4b9e-9632-74841df01592] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 662.633363] env[62096]: DEBUG nova.network.neutron [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: bbed3eed-f511-4b9e-9632-74841df01592] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 662.654908] env[62096]: DEBUG nova.network.neutron [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: bbed3eed-f511-4b9e-9632-74841df01592] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 662.655513] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg d3a4e4a3216446389bc1116f5cec9842 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 662.662420] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d3a4e4a3216446389bc1116f5cec9842 [ 662.684524] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg c0fe946fd981404b95a9f76d021ed094 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 662.697160] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c0fe946fd981404b95a9f76d021ed094 [ 662.987635] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6996b74d-7f67-4870-8477-c5d928b5f117 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.001334] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11a77a18-44b1-4e80-8035-0f02bd7838c2 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.033119] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83aff248-701c-47d0-aa9d-5388b3c8521e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.041314] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4db72b0-f12c-45ee-9aa7-321bb8e71818 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.057585] env[62096]: DEBUG nova.compute.provider_tree [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 663.058101] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Expecting reply to msg a2865a117a4343d481ac9ed9f5d81752 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 663.067282] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a2865a117a4343d481ac9ed9f5d81752 [ 663.091769] env[62096]: DEBUG oslo_concurrency.lockutils [None req-70f080d3-692d-46fb-a741-1ba8c3b3362d tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 663.159496] env[62096]: DEBUG nova.network.neutron [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: 
bbed3eed-f511-4b9e-9632-74841df01592] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 663.160072] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg fac14e580ebf40e78715472a503c62de in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 663.172894] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fac14e580ebf40e78715472a503c62de [ 663.187090] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Lock "d9562762-52de-4a0c-b6a2-2aeaa20e47a0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 91.418s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 663.187492] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Expecting reply to msg f32169b85aae49f7b395bf38304ebfec in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 663.211983] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f32169b85aae49f7b395bf38304ebfec [ 663.564500] env[62096]: DEBUG nova.scheduler.client.report [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 663.567071] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Expecting reply to msg ad041261e7b24f7eb3aa27bab5b8d3f9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 663.581686] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ad041261e7b24f7eb3aa27bab5b8d3f9 [ 663.661998] env[62096]: INFO nova.compute.manager [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] [instance: bbed3eed-f511-4b9e-9632-74841df01592] Took 1.03 seconds to deallocate network for instance. 
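The recurring "Lock ... acquired ... waited Ns" / "released ... held Ns" DEBUG lines (for example the compute_resources and per-instance build locks above) come from the inner wrapper in oslo.concurrency's lockutils, which times how long each caller waited for and then held a named lock. Below is a small sketch of the usage pattern that produces them; the decorated function and its argument are made up for illustration and are not Nova's.

from oslo_concurrency import lockutils

# Sketch of the named-lock pattern behind the "acquired ... waited Ns" /
# "released ... held Ns" DEBUG lines above. lockutils.synchronized serializes
# callers on the named lock and logs the wait/hold times for each of them.
@lockutils.synchronized("compute_resources")
def claim_resources(instance_uuid):
    # Anything done here counts toward the "held N.NNNs" figure in the log.
    return "claimed %s" % instance_uuid

print(claim_resources("e5c75298-b9ef-4e28-a038-b55d8e198539"))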
[ 663.663725] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg 0e642b8ecd124ceabed4fb87c8fb5b85 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 663.695112] env[62096]: DEBUG nova.compute.manager [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 663.696929] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Expecting reply to msg 85d3c6f0f41c496d9fd588d3872c6037 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 663.698226] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0e642b8ecd124ceabed4fb87c8fb5b85 [ 663.731472] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 85d3c6f0f41c496d9fd588d3872c6037 [ 664.070195] env[62096]: DEBUG oslo_concurrency.lockutils [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.562s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 664.070751] env[62096]: DEBUG nova.compute.manager [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 664.072541] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Expecting reply to msg 258ac6e38fa242669e5eb749ec8cf8dc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 664.073605] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.929s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 664.075295] env[62096]: INFO nova.compute.claims [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 664.076793] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Expecting reply to msg 54a752a980904fcb931adc31881cc4dc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 664.125670] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 54a752a980904fcb931adc31881cc4dc [ 664.126300] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 258ac6e38fa242669e5eb749ec8cf8dc [ 664.168213] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg 22fc80f30f624ad686302daf13eb1dfd in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 664.191474] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 22fc80f30f624ad686302daf13eb1dfd [ 664.225291] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 664.590417] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Expecting reply to msg 9e0ea14c48b44611bf10a28b14d85360 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 664.592178] env[62096]: DEBUG nova.compute.utils [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 664.592785] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Expecting reply to msg 8fdf26a7d7b44a8a846dd8070cd6ef1d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 664.593746] env[62096]: DEBUG 
nova.compute.manager [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 664.593936] env[62096]: DEBUG nova.network.neutron [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 664.599670] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9e0ea14c48b44611bf10a28b14d85360 [ 664.607272] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8fdf26a7d7b44a8a846dd8070cd6ef1d [ 664.635792] env[62096]: DEBUG nova.policy [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '22b67c9741f5435c89dcd9e8ea1911a7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '78667758c2e9405680900f07d5619066', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 664.671824] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg 2ca16ead22934d47a2ac17ef47a8f78e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 664.698577] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2ca16ead22934d47a2ac17ef47a8f78e [ 665.077003] env[62096]: DEBUG nova.network.neutron [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] Successfully created port: 71f35985-e6b7-42f9-b1d6-19ed301c27b5 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 665.097002] env[62096]: DEBUG nova.compute.manager [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 665.098640] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Expecting reply to msg eac6bf03ce854627bbbab5bdfd9125b6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 665.139652] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eac6bf03ce854627bbbab5bdfd9125b6 [ 665.180592] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Expecting reply to msg 355064d6d6ac447c9a6c72ecd588d80e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 665.209783] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 355064d6d6ac447c9a6c72ecd588d80e [ 665.517652] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b63105c3-5b9b-4162-9fcf-a90a7febab34 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.526736] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13d4a98c-f3d4-4eed-aea8-790d9272a8fc {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.564429] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c28aba9-02fe-4e4a-8e05-5ef7e20a23f9 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.575807] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d213c39f-2952-4f9d-892a-c54393d3eb19 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.592725] env[62096]: DEBUG nova.compute.provider_tree [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 665.593318] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Expecting reply to msg 370cd563c4894ca59a447c196d37b589 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 665.601513] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 370cd563c4894ca59a447c196d37b589 [ 665.603665] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Expecting reply to msg 2361b4831f654c8fb9da9f96f728f3a5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 665.621086] env[62096]: DEBUG nova.network.neutron [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] Successfully created port: 5cd1261b-8391-4db4-a96a-fe9263ab4e00 {{(pid=62096) _create_port_minimal 
/opt/stack/nova/nova/network/neutron.py:548}} [ 665.644471] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2361b4831f654c8fb9da9f96f728f3a5 [ 665.701877] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1fdde52c-6430-4ad4-8e03-8e742fec6718 tempest-ListServersNegativeTestJSON-431532534 tempest-ListServersNegativeTestJSON-431532534-project-member] Lock "bbed3eed-f511-4b9e-9632-74841df01592" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 93.890s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 665.702711] env[62096]: Traceback (most recent call last): [ 665.702806] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 665.702806] env[62096]: self.driver.spawn(context, instance, image_meta, [ 665.702806] env[62096]: File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 665.702806] env[62096]: self._vmops.spawn(context, instance, image_meta, injected_files, [ 665.702806] env[62096]: File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 665.702806] env[62096]: vm_ref = self.build_virtual_machine(instance, [ 665.702806] env[62096]: File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 665.702806] env[62096]: vif_infos = vmwarevif.get_vif_info(self._session, [ 665.702806] env[62096]: File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 665.702806] env[62096]: for vif in network_info: [ 665.702806] env[62096]: File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 665.702806] env[62096]: return self._sync_wrapper(fn, *args, **kwargs) [ 665.702806] env[62096]: File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 665.702806] env[62096]: self.wait() [ 665.702806] env[62096]: File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 665.702806] env[62096]: self[:] = self._gt.wait() [ 665.702806] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 665.702806] env[62096]: return self._exit_event.wait() [ 665.702806] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 665.702806] env[62096]: current.throw(*self._exc) [ 665.702806] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 665.702806] env[62096]: result = function(*args, **kwargs) [ 665.702806] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 665.702806] env[62096]: return func(*args, **kwargs) [ 665.702806] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 665.702806] env[62096]: raise e [ 665.702806] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 665.702806] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 665.702806] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 665.702806] env[62096]: created_port_ids = self._update_ports_for_instance( [ 665.702806] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 665.702806] env[62096]: with excutils.save_and_reraise_exception(): [ 665.703780] env[62096]: File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 665.703780] env[62096]: self.force_reraise() [ 665.703780] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 665.703780] env[62096]: raise self.value [ 665.703780] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 665.703780] env[62096]: updated_port = self._update_port( [ 665.703780] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 665.703780] env[62096]: _ensure_no_port_binding_failure(port) [ 665.703780] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 665.703780] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 665.703780] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 25b221b6-400a-463b-902b-13eb0822967e, please check neutron logs for more information. [ 665.703780] env[62096]: During handling of the above exception, another exception occurred: [ 665.703780] env[62096]: Traceback (most recent call last): [ 665.703780] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2456, in _do_build_and_run_instance [ 665.703780] env[62096]: self._build_and_run_instance(context, instance, image, [ 665.703780] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2748, in _build_and_run_instance [ 665.703780] env[62096]: raise exception.RescheduledException( [ 665.703780] env[62096]: nova.exception.RescheduledException: Build of instance bbed3eed-f511-4b9e-9632-74841df01592 was re-scheduled: Binding failed for port 25b221b6-400a-463b-902b-13eb0822967e, please check neutron logs for more information. 
[ 665.703780] env[62096]: During handling of the above exception, another exception occurred: [ 665.703780] env[62096]: Traceback (most recent call last): [ 665.703780] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenpool.py", line 87, in _spawn_n_impl [ 665.703780] env[62096]: func(*args, **kwargs) [ 665.703780] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 665.703780] env[62096]: return func(*args, **kwargs) [ 665.703780] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 665.703780] env[62096]: return f(*args, **kwargs) [ 665.703780] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2347, in _locked_do_build_and_run_instance [ 665.703780] env[62096]: result = self._do_build_and_run_instance(*args, **kwargs) [ 665.704661] env[62096]: File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 665.704661] env[62096]: with excutils.save_and_reraise_exception(): [ 665.704661] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 665.704661] env[62096]: self.force_reraise() [ 665.704661] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 665.704661] env[62096]: raise self.value [ 665.704661] env[62096]: File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 665.704661] env[62096]: return f(self, context, *args, **kw) [ 665.704661] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 665.704661] env[62096]: with excutils.save_and_reraise_exception(): [ 665.704661] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 665.704661] env[62096]: self.force_reraise() [ 665.704661] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 665.704661] env[62096]: raise self.value [ 665.704661] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 665.704661] env[62096]: return function(self, context, *args, **kwargs) [ 665.704661] env[62096]: File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 665.704661] env[62096]: return function(self, context, *args, **kwargs) [ 665.704661] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 665.704661] env[62096]: return function(self, context, *args, **kwargs) [ 665.704661] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2491, in _do_build_and_run_instance [ 665.704661] env[62096]: instance.save() [ 665.704661] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_versionedobjects/base.py", line 209, in wrapper [ 665.704661] env[62096]: updates, result = self.indirection_api.object_action( [ 665.704661] env[62096]: File "/opt/stack/nova/nova/conductor/rpcapi.py", line 247, in object_action [ 665.704661] env[62096]: return cctxt.call(context, 'object_action', objinst=objinst, [ 665.704661] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/client.py", line 190, in call [ 665.704661] env[62096]: result = self.transport._send( [ 665.704661] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/transport.py", line 123, in _send [ 665.704661] env[62096]: return 
self._driver.send(target, ctxt, message, [ 665.705442] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 788, in send [ 665.705442] env[62096]: return self._send(target, ctxt, message, wait_for_reply, timeout, [ 665.705442] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 780, in _send [ 665.705442] env[62096]: raise result [ 665.705442] env[62096]: nova.exception_Remote.InstanceNotFound_Remote: Instance bbed3eed-f511-4b9e-9632-74841df01592 could not be found. [ 665.705442] env[62096]: Traceback (most recent call last): [ 665.705442] env[62096]: File "/opt/stack/nova/nova/conductor/manager.py", line 142, in _object_dispatch [ 665.705442] env[62096]: return getattr(target, method)(*args, **kwargs) [ 665.705442] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_versionedobjects/base.py", line 226, in wrapper [ 665.705442] env[62096]: return fn(self, *args, **kwargs) [ 665.705442] env[62096]: File "/opt/stack/nova/nova/objects/instance.py", line 878, in save [ 665.705442] env[62096]: old_ref, inst_ref = db.instance_update_and_get_original( [ 665.705442] env[62096]: File "/opt/stack/nova/nova/db/utils.py", line 35, in wrapper [ 665.705442] env[62096]: return f(*args, **kwargs) [ 665.705442] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/api.py", line 144, in wrapper [ 665.705442] env[62096]: with excutils.save_and_reraise_exception() as ectxt: [ 665.705442] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 665.705442] env[62096]: self.force_reraise() [ 665.705442] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 665.705442] env[62096]: raise self.value [ 665.705442] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/api.py", line 142, in wrapper [ 665.705442] env[62096]: return f(*args, **kwargs) [ 665.705442] env[62096]: File "/opt/stack/nova/nova/db/main/api.py", line 207, in wrapper [ 665.705442] env[62096]: return f(context, *args, **kwargs) [ 665.705442] env[62096]: File "/opt/stack/nova/nova/db/main/api.py", line 2283, in instance_update_and_get_original [ 665.705442] env[62096]: instance_ref = _instance_get_by_uuid(context, instance_uuid, [ 665.705442] env[62096]: File "/opt/stack/nova/nova/db/main/api.py", line 1405, in _instance_get_by_uuid [ 665.705442] env[62096]: raise exception.InstanceNotFound(instance_id=uuid) [ 665.705442] env[62096]: nova.exception.InstanceNotFound: Instance bbed3eed-f511-4b9e-9632-74841df01592 could not be found. 
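Both tracebacks above pass repeatedly through excutils.save_and_reraise_exception() and force_reraise(): cleanup code runs inside the context manager, and the original exception is re-raised when it exits, which is why the PortBindingFailed (and later the remote InstanceNotFound) surfaces unchanged through each layer. Below is a minimal, self-contained illustration of that pattern; the wrapper function and its arguments are invented for the example.

from oslo_utils import excutils

# Minimal illustration of the save_and_reraise_exception() pattern seen in the
# tracebacks above: the cleanup body runs, then __exit__ calls force_reraise()
# so the original exception propagates. The functions here are examples only.
def update_with_cleanup(update, cleanup):
    try:
        return update()
    except Exception:
        with excutils.save_and_reraise_exception():
            cleanup()  # runs before the original exception is re-raised

def failing_update():
    raise RuntimeError("Binding failed for port 25b221b6-400a-463b-902b-13eb0822967e")

try:
    update_with_cleanup(failing_update, lambda: print("rolling back port update"))
except RuntimeError as exc:
    print("re-raised:", exc)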
[ 665.706269] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Expecting reply to msg 9f742782562d478aaf1d567bc3077302 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 665.716800] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f742782562d478aaf1d567bc3077302 [ 665.988997] env[62096]: DEBUG nova.network.neutron [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] Successfully created port: 4ef1f5b5-153f-4830-aa06-3fceeeb9cea6 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 666.096223] env[62096]: DEBUG nova.scheduler.client.report [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 666.098637] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Expecting reply to msg 63eef2ff25b842acabd663f800e2704c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 666.113836] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 63eef2ff25b842acabd663f800e2704c [ 666.116540] env[62096]: DEBUG nova.compute.manager [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 666.142988] env[62096]: DEBUG nova.virt.hardware [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 666.143239] env[62096]: DEBUG nova.virt.hardware [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 666.143397] env[62096]: DEBUG nova.virt.hardware [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 666.143579] env[62096]: DEBUG nova.virt.hardware [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 666.143720] env[62096]: DEBUG nova.virt.hardware [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 666.143894] env[62096]: DEBUG nova.virt.hardware [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 666.144117] env[62096]: DEBUG nova.virt.hardware [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 666.144284] env[62096]: DEBUG nova.virt.hardware [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 666.144463] env[62096]: DEBUG nova.virt.hardware [None req-95f70777-dc9c-4d7d-99c8-605de6557417 
tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 666.144606] env[62096]: DEBUG nova.virt.hardware [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 666.144774] env[62096]: DEBUG nova.virt.hardware [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 666.145612] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f7aefa8-4d03-4941-bea9-836e3f799d7d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.153532] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a63c3e2a-bc51-4b53-b490-34515fa9df0a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.209350] env[62096]: DEBUG nova.compute.manager [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 666.211142] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Expecting reply to msg 6e45bc14ffa64ab0bb6f054ddb5a42af in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 666.259305] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6e45bc14ffa64ab0bb6f054ddb5a42af [ 666.602554] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.529s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 666.603718] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Expecting reply to msg 92e08ec69f634bd2bc0d00cf6ce9e384 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 666.603866] env[62096]: DEBUG oslo_concurrency.lockutils [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.420s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 666.606064] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None 
req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 120fbf00fff64f05b965bc13dd4ca97b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 666.623844] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 92e08ec69f634bd2bc0d00cf6ce9e384 [ 666.654842] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 120fbf00fff64f05b965bc13dd4ca97b [ 666.747273] env[62096]: DEBUG oslo_concurrency.lockutils [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 667.120269] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Acquiring lock "5c4178e5-8a53-43b6-ba15-fc0a4f593ec8" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 667.120269] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Lock "5c4178e5-8a53-43b6-ba15-fc0a4f593ec8" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 667.120269] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Expecting reply to msg 0ee4fbd347ae4d43afb4266f89ba844e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 667.124038] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0ee4fbd347ae4d43afb4266f89ba844e [ 667.583724] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-681cf8d5-47c7-458c-ae7c-2f344942c3cf {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.592820] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02736e5f-9268-43e3-93ef-6d608d3972c2 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.621726] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Lock "5c4178e5-8a53-43b6-ba15-fc0a4f593ec8" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" :: held 0.504s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 667.622296] env[62096]: DEBUG nova.compute.manager [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] Start building
networks asynchronously for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 667.624062] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Expecting reply to msg 5711f798b068422a98646918c7685364 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 667.629568] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa55ca6c-0eb4-4afd-8fb2-9153e4b89805 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.633919] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-190fd8d2-1531-4c23-9803-ba0e5b0a5f89 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.646867] env[62096]: DEBUG nova.compute.provider_tree [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 667.647302] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 7a2233668d9a4beeb4c133acea9254cf in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 667.661361] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7a2233668d9a4beeb4c133acea9254cf [ 667.665370] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5711f798b068422a98646918c7685364 [ 667.837326] env[62096]: DEBUG nova.compute.manager [req-78bc9729-bbb9-4a42-a5d8-ef667080e4e9 req-04ebb7c7-dc8f-4529-9579-635b81052fb4 service nova] [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] Received event network-changed-71f35985-e6b7-42f9-b1d6-19ed301c27b5 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 667.837469] env[62096]: DEBUG nova.compute.manager [req-78bc9729-bbb9-4a42-a5d8-ef667080e4e9 req-04ebb7c7-dc8f-4529-9579-635b81052fb4 service nova] [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] Refreshing instance network info cache due to event network-changed-71f35985-e6b7-42f9-b1d6-19ed301c27b5. 
{{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 667.837672] env[62096]: DEBUG oslo_concurrency.lockutils [req-78bc9729-bbb9-4a42-a5d8-ef667080e4e9 req-04ebb7c7-dc8f-4529-9579-635b81052fb4 service nova] Acquiring lock "refresh_cache-e5c75298-b9ef-4e28-a038-b55d8e198539" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 667.837805] env[62096]: DEBUG oslo_concurrency.lockutils [req-78bc9729-bbb9-4a42-a5d8-ef667080e4e9 req-04ebb7c7-dc8f-4529-9579-635b81052fb4 service nova] Acquired lock "refresh_cache-e5c75298-b9ef-4e28-a038-b55d8e198539" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 667.837967] env[62096]: DEBUG nova.network.neutron [req-78bc9729-bbb9-4a42-a5d8-ef667080e4e9 req-04ebb7c7-dc8f-4529-9579-635b81052fb4 service nova] [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] Refreshing network info cache for port 71f35985-e6b7-42f9-b1d6-19ed301c27b5 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 667.838369] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-78bc9729-bbb9-4a42-a5d8-ef667080e4e9 req-04ebb7c7-dc8f-4529-9579-635b81052fb4 service nova] Expecting reply to msg a480c68b14f040f1857a8771c06cc838 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 667.846087] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a480c68b14f040f1857a8771c06cc838 [ 667.945963] env[62096]: ERROR nova.compute.manager [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 71f35985-e6b7-42f9-b1d6-19ed301c27b5, please check neutron logs for more information. 
[ 667.945963] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 667.945963] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 667.945963] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 667.945963] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 667.945963] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 667.945963] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 667.945963] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 667.945963] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 667.945963] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 667.945963] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 667.945963] env[62096]: ERROR nova.compute.manager raise self.value [ 667.945963] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 667.945963] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 667.945963] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 667.945963] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 667.946514] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 667.946514] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 667.946514] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 71f35985-e6b7-42f9-b1d6-19ed301c27b5, please check neutron logs for more information. 
[ 667.946514] env[62096]: ERROR nova.compute.manager [ 667.946514] env[62096]: Traceback (most recent call last): [ 667.946514] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 667.946514] env[62096]: listener.cb(fileno) [ 667.946514] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 667.946514] env[62096]: result = function(*args, **kwargs) [ 667.946514] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 667.946514] env[62096]: return func(*args, **kwargs) [ 667.946514] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 667.946514] env[62096]: raise e [ 667.946514] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 667.946514] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 667.946514] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 667.946514] env[62096]: created_port_ids = self._update_ports_for_instance( [ 667.946514] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 667.946514] env[62096]: with excutils.save_and_reraise_exception(): [ 667.946514] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 667.946514] env[62096]: self.force_reraise() [ 667.946514] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 667.946514] env[62096]: raise self.value [ 667.946514] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 667.946514] env[62096]: updated_port = self._update_port( [ 667.946514] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 667.946514] env[62096]: _ensure_no_port_binding_failure(port) [ 667.946514] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 667.946514] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 667.947343] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 71f35985-e6b7-42f9-b1d6-19ed301c27b5, please check neutron logs for more information. [ 667.947343] env[62096]: Removing descriptor: 16 [ 667.947343] env[62096]: ERROR nova.compute.manager [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 71f35985-e6b7-42f9-b1d6-19ed301c27b5, please check neutron logs for more information. 
[ 667.947343] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] Traceback (most recent call last): [ 667.947343] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 667.947343] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] yield resources [ 667.947343] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 667.947343] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] self.driver.spawn(context, instance, image_meta, [ 667.947343] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 667.947343] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] self._vmops.spawn(context, instance, image_meta, injected_files, [ 667.947343] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 667.947343] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] vm_ref = self.build_virtual_machine(instance, [ 667.947781] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 667.947781] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] vif_infos = vmwarevif.get_vif_info(self._session, [ 667.947781] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 667.947781] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] for vif in network_info: [ 667.947781] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 667.947781] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] return self._sync_wrapper(fn, *args, **kwargs) [ 667.947781] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 667.947781] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] self.wait() [ 667.947781] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 667.947781] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] self[:] = self._gt.wait() [ 667.947781] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 667.947781] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] return self._exit_event.wait() [ 667.947781] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 667.948182] env[62096]: ERROR 
nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] result = hub.switch() [ 667.948182] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 667.948182] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] return self.greenlet.switch() [ 667.948182] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 667.948182] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] result = function(*args, **kwargs) [ 667.948182] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 667.948182] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] return func(*args, **kwargs) [ 667.948182] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 667.948182] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] raise e [ 667.948182] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 667.948182] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] nwinfo = self.network_api.allocate_for_instance( [ 667.948182] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 667.948182] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] created_port_ids = self._update_ports_for_instance( [ 667.948617] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 667.948617] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] with excutils.save_and_reraise_exception(): [ 667.948617] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 667.948617] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] self.force_reraise() [ 667.948617] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 667.948617] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] raise self.value [ 667.948617] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 667.948617] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] updated_port = self._update_port( [ 667.948617] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 667.948617] 
env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] _ensure_no_port_binding_failure(port) [ 667.948617] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 667.948617] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] raise exception.PortBindingFailed(port_id=port['id']) [ 667.949024] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] nova.exception.PortBindingFailed: Binding failed for port 71f35985-e6b7-42f9-b1d6-19ed301c27b5, please check neutron logs for more information. [ 667.949024] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] [ 667.949024] env[62096]: INFO nova.compute.manager [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] Terminating instance [ 667.955791] env[62096]: DEBUG oslo_concurrency.lockutils [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Acquiring lock "refresh_cache-e5c75298-b9ef-4e28-a038-b55d8e198539" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 668.130870] env[62096]: DEBUG nova.compute.utils [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 668.131517] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Expecting reply to msg 9cc5df0cd5c74a91b9e9c5e3aa8a180d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 668.132513] env[62096]: DEBUG nova.compute.manager [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 668.132695] env[62096]: DEBUG nova.network.neutron [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 668.147120] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9cc5df0cd5c74a91b9e9c5e3aa8a180d [ 668.149492] env[62096]: DEBUG nova.scheduler.client.report [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 668.151801] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 8a3ce000da314b719d0dbbea6be3d167 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 668.166905] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8a3ce000da314b719d0dbbea6be3d167 [ 668.183917] env[62096]: DEBUG nova.policy [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '981c934926ba4466bf0d4ab91d3f6cb2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7df44d24a05543a283a1de35b3b67884', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 668.499083] env[62096]: DEBUG nova.network.neutron [req-78bc9729-bbb9-4a42-a5d8-ef667080e4e9 req-04ebb7c7-dc8f-4529-9579-635b81052fb4 service nova] [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 668.499083] env[62096]: DEBUG nova.network.neutron [req-78bc9729-bbb9-4a42-a5d8-ef667080e4e9 req-04ebb7c7-dc8f-4529-9579-635b81052fb4 service nova] [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 668.499083] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-78bc9729-bbb9-4a42-a5d8-ef667080e4e9 req-04ebb7c7-dc8f-4529-9579-635b81052fb4 service nova] Expecting reply to msg 24235c34731b4c32abc21b10a4c1f692 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 668.499083] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 24235c34731b4c32abc21b10a4c1f692 [ 668.636522] env[62096]: DEBUG nova.compute.manager [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] Start building block device mappings for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 668.638259] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Expecting reply to msg 84dc709dee814f618f9b3cac324c3241 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 668.654026] env[62096]: DEBUG oslo_concurrency.lockutils [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.050s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 668.654689] env[62096]: ERROR nova.compute.manager [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 6c8587cf-1294-4940-88ce-d97511d27160, please check neutron logs for more information. 
[ 668.654689] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] Traceback (most recent call last): [ 668.654689] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 668.654689] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] self.driver.spawn(context, instance, image_meta, [ 668.654689] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 668.654689] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 668.654689] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 668.654689] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] vm_ref = self.build_virtual_machine(instance, [ 668.654689] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 668.654689] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] vif_infos = vmwarevif.get_vif_info(self._session, [ 668.654689] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 668.655438] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] for vif in network_info: [ 668.655438] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 668.655438] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] return self._sync_wrapper(fn, *args, **kwargs) [ 668.655438] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 668.655438] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] self.wait() [ 668.655438] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 668.655438] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] self[:] = self._gt.wait() [ 668.655438] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 668.655438] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] return self._exit_event.wait() [ 668.655438] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 668.655438] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] current.throw(*self._exc) [ 668.655438] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
668.655438] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] result = function(*args, **kwargs) [ 668.655783] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 668.655783] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] return func(*args, **kwargs) [ 668.655783] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 668.655783] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] raise e [ 668.655783] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 668.655783] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] nwinfo = self.network_api.allocate_for_instance( [ 668.655783] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 668.655783] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] created_port_ids = self._update_ports_for_instance( [ 668.655783] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 668.655783] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] with excutils.save_and_reraise_exception(): [ 668.655783] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 668.655783] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] self.force_reraise() [ 668.655783] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 668.656208] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] raise self.value [ 668.656208] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 668.656208] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] updated_port = self._update_port( [ 668.656208] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 668.656208] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] _ensure_no_port_binding_failure(port) [ 668.656208] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 668.656208] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] raise exception.PortBindingFailed(port_id=port['id']) [ 668.656208] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] nova.exception.PortBindingFailed: Binding failed for 
port 6c8587cf-1294-4940-88ce-d97511d27160, please check neutron logs for more information. [ 668.656208] env[62096]: ERROR nova.compute.manager [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] [ 668.656208] env[62096]: DEBUG nova.compute.utils [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] Binding failed for port 6c8587cf-1294-4940-88ce-d97511d27160, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 668.656725] env[62096]: DEBUG oslo_concurrency.lockutils [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.220s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 668.658387] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 8f860fca9f69449f9550d978f2e4fb02 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 668.660266] env[62096]: DEBUG nova.compute.manager [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] Build of instance c2a06721-6848-4fc4-89da-5d292853b6e9 was re-scheduled: Binding failed for port 6c8587cf-1294-4940-88ce-d97511d27160, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 668.661026] env[62096]: DEBUG nova.compute.manager [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 668.661252] env[62096]: DEBUG oslo_concurrency.lockutils [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Acquiring lock "refresh_cache-c2a06721-6848-4fc4-89da-5d292853b6e9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 668.661392] env[62096]: DEBUG oslo_concurrency.lockutils [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Acquired lock "refresh_cache-c2a06721-6848-4fc4-89da-5d292853b6e9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 668.661550] env[62096]: DEBUG nova.network.neutron [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 668.661926] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 079bed893acf443bb978033fbb0f0a3d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 668.679008] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 079bed893acf443bb978033fbb0f0a3d [ 668.684113] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 84dc709dee814f618f9b3cac324c3241 [ 668.694556] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8f860fca9f69449f9550d978f2e4fb02 [ 668.815577] env[62096]: DEBUG nova.network.neutron [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] Successfully created port: 3cd450aa-2fc5-4d09-a571-2b6c9545419d {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 668.988518] env[62096]: DEBUG oslo_concurrency.lockutils [req-78bc9729-bbb9-4a42-a5d8-ef667080e4e9 req-04ebb7c7-dc8f-4529-9579-635b81052fb4 service nova] Releasing lock "refresh_cache-e5c75298-b9ef-4e28-a038-b55d8e198539" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 668.989109] env[62096]: DEBUG oslo_concurrency.lockutils [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Acquired lock "refresh_cache-e5c75298-b9ef-4e28-a038-b55d8e198539" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 668.989434] env[62096]: DEBUG nova.network.neutron [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 
tempest-ServersTestMultiNic-570865357-project-member] [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 668.990099] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Expecting reply to msg ab5df9c7f31d420d9930ac77c4abdf39 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 668.999404] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ab5df9c7f31d420d9930ac77c4abdf39 [ 669.143236] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Expecting reply to msg 345d5164009146308a87d8104c9c575b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 669.180387] env[62096]: DEBUG nova.network.neutron [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 669.186595] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 345d5164009146308a87d8104c9c575b [ 669.324237] env[62096]: DEBUG nova.network.neutron [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 669.325108] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 398df1e69ee449e8ac34409bd146101c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 669.334664] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 398df1e69ee449e8ac34409bd146101c [ 669.402354] env[62096]: DEBUG oslo_concurrency.lockutils [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Acquiring lock "3283ae52-1a1f-4dec-91f3-44cc42361bb5" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 669.402556] env[62096]: DEBUG oslo_concurrency.lockutils [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Lock "3283ae52-1a1f-4dec-91f3-44cc42361bb5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 669.538226] env[62096]: DEBUG nova.network.neutron [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 669.646363] env[62096]: DEBUG nova.compute.manager [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] Start spawning the instance on the hypervisor. {{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 669.652263] env[62096]: DEBUG nova.network.neutron [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 669.652320] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Expecting reply to msg 1d2b8892a3c1471493ff67ec070c44d3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 669.662895] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1d2b8892a3c1471493ff67ec070c44d3 [ 669.668807] env[62096]: DEBUG nova.virt.hardware [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=<?>,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-08-19T11:51:56Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 669.669040] env[62096]: DEBUG nova.virt.hardware [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 669.669191] env[62096]: DEBUG nova.virt.hardware [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 669.669369] env[62096]: DEBUG nova.virt.hardware [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 669.669495] env[62096]: DEBUG nova.virt.hardware [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
669.669635] env[62096]: DEBUG nova.virt.hardware [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 669.669832] env[62096]: DEBUG nova.virt.hardware [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 669.669980] env[62096]: DEBUG nova.virt.hardware [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 669.670136] env[62096]: DEBUG nova.virt.hardware [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 669.670288] env[62096]: DEBUG nova.virt.hardware [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 669.670448] env[62096]: DEBUG nova.virt.hardware [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 669.671562] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcdce6ca-97c2-4481-a16b-c2e7f2c556eb {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.692881] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f4d9baa-165e-425c-a48a-a4a457271172 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.715089] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d53196b-8a22-4954-b66a-2bb70f68991f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.725376] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a304ea06-27cc-4397-b162-380c51df56d5 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.763614] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b32abe61-601c-493c-918e-08897941c324 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.774723] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-1d2b2a45-7039-4952-a63c-954352cf38d6 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.789988] env[62096]: DEBUG nova.compute.provider_tree [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 669.790520] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg b21d08a323a3493ca5fd92b79d37cd21 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 669.800266] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b21d08a323a3493ca5fd92b79d37cd21 [ 669.801489] env[62096]: DEBUG nova.scheduler.client.report [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 669.804213] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 655e4694eb274005ae3d66549859382d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 669.819055] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 655e4694eb274005ae3d66549859382d [ 669.828899] env[62096]: DEBUG oslo_concurrency.lockutils [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Releasing lock "refresh_cache-c2a06721-6848-4fc4-89da-5d292853b6e9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 669.828899] env[62096]: DEBUG nova.compute.manager [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 669.829169] env[62096]: DEBUG nova.compute.manager [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 669.829251] env[62096]: DEBUG nova.network.neutron [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 669.857713] env[62096]: DEBUG nova.network.neutron [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 669.857945] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 780835496cdf4da1a317e78611323e79 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 669.865972] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 780835496cdf4da1a317e78611323e79 [ 670.154634] env[62096]: DEBUG oslo_concurrency.lockutils [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Releasing lock "refresh_cache-e5c75298-b9ef-4e28-a038-b55d8e198539" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 670.155091] env[62096]: DEBUG nova.compute.manager [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 670.155297] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 670.155974] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-56b8738a-092e-436a-90c3-e837370df247 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.164634] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61749f26-0e49-49a6-ab9e-6b6b8216b4d1 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.185927] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e5c75298-b9ef-4e28-a038-b55d8e198539 could not be found. [ 670.186084] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 670.186252] env[62096]: INFO nova.compute.manager [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] Took 0.03 seconds to destroy the instance on the hypervisor. [ 670.186511] env[62096]: DEBUG oslo.service.loopingcall [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 670.186678] env[62096]: DEBUG nova.compute.manager [-] [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 670.186764] env[62096]: DEBUG nova.network.neutron [-] [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 670.289250] env[62096]: DEBUG nova.network.neutron [-] [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 670.306875] env[62096]: DEBUG oslo_concurrency.lockutils [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.650s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 670.307503] env[62096]: ERROR nova.compute.manager [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 169d02ed-e804-4dc5-a136-c6d9178c5dcc, please check neutron logs for more information. [ 670.307503] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] Traceback (most recent call last): [ 670.307503] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 670.307503] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] self.driver.spawn(context, instance, image_meta, [ 670.307503] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 670.307503] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 670.307503] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 670.307503] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] vm_ref = self.build_virtual_machine(instance, [ 670.307503] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 670.307503] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] vif_infos = vmwarevif.get_vif_info(self._session, [ 670.307503] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 670.307846] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] for vif in network_info: [ 670.307846] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 670.307846] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] return self._sync_wrapper(fn, *args, **kwargs) [ 670.307846] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 670.307846] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] self.wait() [ 670.307846] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 670.307846] env[62096]: ERROR 
nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] self[:] = self._gt.wait() [ 670.307846] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 670.307846] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] return self._exit_event.wait() [ 670.307846] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 670.307846] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] current.throw(*self._exc) [ 670.307846] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 670.307846] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] result = function(*args, **kwargs) [ 670.308189] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 670.308189] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] return func(*args, **kwargs) [ 670.308189] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 670.308189] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] raise e [ 670.308189] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 670.308189] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] nwinfo = self.network_api.allocate_for_instance( [ 670.308189] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 670.308189] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] created_port_ids = self._update_ports_for_instance( [ 670.308189] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 670.308189] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] with excutils.save_and_reraise_exception(): [ 670.308189] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 670.308189] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] self.force_reraise() [ 670.308189] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 670.308513] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] raise self.value [ 670.308513] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in 
_update_ports_for_instance [ 670.308513] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] updated_port = self._update_port( [ 670.308513] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 670.308513] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] _ensure_no_port_binding_failure(port) [ 670.308513] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 670.308513] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] raise exception.PortBindingFailed(port_id=port['id']) [ 670.308513] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] nova.exception.PortBindingFailed: Binding failed for port 169d02ed-e804-4dc5-a136-c6d9178c5dcc, please check neutron logs for more information. [ 670.308513] env[62096]: ERROR nova.compute.manager [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] [ 670.308513] env[62096]: DEBUG nova.compute.utils [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] Binding failed for port 169d02ed-e804-4dc5-a136-c6d9178c5dcc, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 670.309991] env[62096]: DEBUG nova.compute.manager [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] Build of instance f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1 was re-scheduled: Binding failed for port 169d02ed-e804-4dc5-a136-c6d9178c5dcc, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 670.310405] env[62096]: DEBUG nova.compute.manager [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 670.310627] env[62096]: DEBUG oslo_concurrency.lockutils [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Acquiring lock "refresh_cache-f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 670.310767] env[62096]: DEBUG oslo_concurrency.lockutils [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Acquired lock "refresh_cache-f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.310923] env[62096]: DEBUG nova.network.neutron [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 670.311343] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 0b649c469e634d61bd50371374faf885 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 670.312693] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.423s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 670.314531] env[62096]: INFO nova.compute.claims [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 374ca884-8e77-4568-8667-e124e6df4c75] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 670.323211] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg c22f60d9c728492ba5cfb6617ecce54e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 670.328497] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0b649c469e634d61bd50371374faf885 [ 670.333023] env[62096]: DEBUG oslo_concurrency.lockutils [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Acquiring lock "c928bf83-9517-449a-854c-6f3d8ce4faa0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 670.333246] env[62096]: DEBUG oslo_concurrency.lockutils [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Lock "c928bf83-9517-449a-854c-6f3d8ce4faa0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 670.360542] env[62096]: DEBUG nova.network.neutron [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 670.361114] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 232c1d7e47c3436c8ca92d340f42150b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 670.367207] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c22f60d9c728492ba5cfb6617ecce54e [ 670.370823] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 232c1d7e47c3436c8ca92d340f42150b [ 670.460170] env[62096]: DEBUG nova.compute.manager [req-3a383775-75ba-4622-af10-7604dc849003 req-aaaf1dbd-a58a-4042-9ed5-bcd4a5a9bd42 service nova] [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] Received event network-vif-deleted-71f35985-e6b7-42f9-b1d6-19ed301c27b5 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 670.619118] env[62096]: ERROR nova.compute.manager [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 3cd450aa-2fc5-4d09-a571-2b6c9545419d, please check neutron logs for more information. 
[ 670.619118] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 670.619118] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 670.619118] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 670.619118] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 670.619118] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 670.619118] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 670.619118] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 670.619118] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 670.619118] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 670.619118] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 670.619118] env[62096]: ERROR nova.compute.manager raise self.value [ 670.619118] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 670.619118] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 670.619118] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 670.619118] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 670.619581] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 670.619581] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 670.619581] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 3cd450aa-2fc5-4d09-a571-2b6c9545419d, please check neutron logs for more information. 
[ 670.619581] env[62096]: ERROR nova.compute.manager [ 670.619581] env[62096]: Traceback (most recent call last): [ 670.619581] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 670.619581] env[62096]: listener.cb(fileno) [ 670.619581] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 670.619581] env[62096]: result = function(*args, **kwargs) [ 670.619581] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 670.619581] env[62096]: return func(*args, **kwargs) [ 670.619581] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 670.619581] env[62096]: raise e [ 670.619581] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 670.619581] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 670.619581] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 670.619581] env[62096]: created_port_ids = self._update_ports_for_instance( [ 670.619581] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 670.619581] env[62096]: with excutils.save_and_reraise_exception(): [ 670.619581] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 670.619581] env[62096]: self.force_reraise() [ 670.619581] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 670.619581] env[62096]: raise self.value [ 670.619581] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 670.619581] env[62096]: updated_port = self._update_port( [ 670.619581] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 670.619581] env[62096]: _ensure_no_port_binding_failure(port) [ 670.619581] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 670.619581] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 670.620340] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 3cd450aa-2fc5-4d09-a571-2b6c9545419d, please check neutron logs for more information. [ 670.620340] env[62096]: Removing descriptor: 16 [ 670.620340] env[62096]: ERROR nova.compute.manager [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 3cd450aa-2fc5-4d09-a571-2b6c9545419d, please check neutron logs for more information. 
[ 670.620340] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] Traceback (most recent call last): [ 670.620340] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 670.620340] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] yield resources [ 670.620340] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 670.620340] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] self.driver.spawn(context, instance, image_meta, [ 670.620340] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 670.620340] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 670.620340] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 670.620340] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] vm_ref = self.build_virtual_machine(instance, [ 670.620670] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 670.620670] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] vif_infos = vmwarevif.get_vif_info(self._session, [ 670.620670] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 670.620670] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] for vif in network_info: [ 670.620670] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 670.620670] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] return self._sync_wrapper(fn, *args, **kwargs) [ 670.620670] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 670.620670] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] self.wait() [ 670.620670] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 670.620670] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] self[:] = self._gt.wait() [ 670.620670] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 670.620670] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] return self._exit_event.wait() [ 670.620670] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 670.621005] env[62096]: ERROR 
nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] result = hub.switch() [ 670.621005] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 670.621005] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] return self.greenlet.switch() [ 670.621005] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 670.621005] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] result = function(*args, **kwargs) [ 670.621005] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 670.621005] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] return func(*args, **kwargs) [ 670.621005] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 670.621005] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] raise e [ 670.621005] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 670.621005] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] nwinfo = self.network_api.allocate_for_instance( [ 670.621005] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 670.621005] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] created_port_ids = self._update_ports_for_instance( [ 670.621339] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 670.621339] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] with excutils.save_and_reraise_exception(): [ 670.621339] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 670.621339] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] self.force_reraise() [ 670.621339] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 670.621339] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] raise self.value [ 670.621339] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 670.621339] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] updated_port = self._update_port( [ 670.621339] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 670.621339] 
env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] _ensure_no_port_binding_failure(port) [ 670.621339] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 670.621339] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] raise exception.PortBindingFailed(port_id=port['id']) [ 670.621652] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] nova.exception.PortBindingFailed: Binding failed for port 3cd450aa-2fc5-4d09-a571-2b6c9545419d, please check neutron logs for more information. [ 670.621652] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] [ 670.621652] env[62096]: INFO nova.compute.manager [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] Terminating instance [ 670.632550] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Acquiring lock "refresh_cache-641e5b3a-15ef-4d78-8339-7a26494038d9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 670.632751] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Acquired lock "refresh_cache-641e5b3a-15ef-4d78-8339-7a26494038d9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.632925] env[62096]: DEBUG nova.network.neutron [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 670.633551] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Expecting reply to msg a5dd2aa6e8eb43f697c3c903589fe37d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 670.638897] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 254606a16be84380a388bdfc3927672b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 670.641351] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a5dd2aa6e8eb43f697c3c903589fe37d [ 670.645002] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 254606a16be84380a388bdfc3927672b [ 670.827296] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg b2eee330179e418bb053fc16642ad18b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 670.828835] env[62096]: DEBUG nova.network.neutron [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 670.837567] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b2eee330179e418bb053fc16642ad18b [ 670.863667] env[62096]: INFO nova.compute.manager [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: c2a06721-6848-4fc4-89da-5d292853b6e9] Took 1.03 seconds to deallocate network for instance. [ 670.865487] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg fe68242e222f474f9b3580087b20e350 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 670.897715] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fe68242e222f474f9b3580087b20e350 [ 670.904489] env[62096]: DEBUG nova.network.neutron [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 670.905107] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 8afcd72e585f4dd798b12386b5c8e8ac in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 670.915565] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8afcd72e585f4dd798b12386b5c8e8ac [ 671.139308] env[62096]: DEBUG nova.network.neutron [-] [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 671.139739] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg ff4fc664038e4b07902c4071b50c9019 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 671.149366] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ff4fc664038e4b07902c4071b50c9019 [ 671.150424] env[62096]: DEBUG nova.network.neutron [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 671.232338] env[62096]: DEBUG nova.network.neutron [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 671.232876] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Expecting reply to msg e0ed4de784614624a16f3df2709f1fa5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 671.241357] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e0ed4de784614624a16f3df2709f1fa5 [ 671.374562] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 051049ca888d42cbafb9b4ed9b5e629c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 671.410316] env[62096]: DEBUG oslo_concurrency.lockutils [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Releasing lock "refresh_cache-f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 671.410316] env[62096]: DEBUG nova.compute.manager [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 671.410316] env[62096]: DEBUG nova.compute.manager [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 671.410316] env[62096]: DEBUG nova.network.neutron [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 671.428658] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 051049ca888d42cbafb9b4ed9b5e629c [ 671.440242] env[62096]: DEBUG nova.network.neutron [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 671.440242] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg c35e6ee82c164bcd9fbfb37f59ff2cd0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 671.442915] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c35e6ee82c164bcd9fbfb37f59ff2cd0 [ 671.644041] env[62096]: INFO nova.compute.manager [-] [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] Took 1.46 seconds to deallocate network for instance. [ 671.646771] env[62096]: DEBUG nova.compute.claims [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 671.647137] env[62096]: DEBUG oslo_concurrency.lockutils [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 671.741556] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Releasing lock "refresh_cache-641e5b3a-15ef-4d78-8339-7a26494038d9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 671.742141] env[62096]: DEBUG nova.compute.manager [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 671.742510] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 671.742941] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-94d93120-2ac1-42ee-b91b-359645046b70 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.749470] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84bf1949-0c01-40c4-87ce-2dab91338e01 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.756914] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8e16d88-b33c-4289-b0d4-8c003ecacac3 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.772393] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c399b87-d9ac-4931-904f-71d02022aa92 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.780684] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 641e5b3a-15ef-4d78-8339-7a26494038d9 could not be found. [ 671.781056] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 671.781379] env[62096]: INFO nova.compute.manager [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] Took 0.04 seconds to destroy the instance on the hypervisor. [ 671.781767] env[62096]: DEBUG oslo.service.loopingcall [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 671.816839] env[62096]: DEBUG nova.compute.manager [-] [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 671.816951] env[62096]: DEBUG nova.network.neutron [-] [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 671.819545] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d0b6ef2-3419-4a15-98a5-6cf303a94472 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.833727] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c8611ad-0477-4ece-8097-d81afc7d16cf {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.838240] env[62096]: DEBUG nova.network.neutron [-] [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 671.838768] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 7153778544e240afb107a45ff5085388 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 671.849044] env[62096]: DEBUG nova.compute.provider_tree [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 671.849576] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg 587b607769b348fca98bc8ad6d146948 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 671.854522] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7153778544e240afb107a45ff5085388 [ 671.856621] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 587b607769b348fca98bc8ad6d146948 [ 671.895050] env[62096]: INFO nova.scheduler.client.report [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Deleted allocations for instance c2a06721-6848-4fc4-89da-5d292853b6e9 [ 671.904458] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg a07f7d7b25994007aea2fc3e1d00f446 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 671.928214] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a07f7d7b25994007aea2fc3e1d00f446 [ 671.938108] env[62096]: DEBUG nova.network.neutron [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 671.938671] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg d9923ba82d2342f1a63629ad1dbf0287 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 671.945955] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d9923ba82d2342f1a63629ad1dbf0287 [ 672.341447] env[62096]: DEBUG nova.network.neutron [-] [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 672.341985] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 30da9c7ed06a47a98de367659db0b703 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 672.350262] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 30da9c7ed06a47a98de367659db0b703 [ 672.352350] env[62096]: DEBUG nova.scheduler.client.report [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 672.355582] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg e279eedd0a414f37bffbc909d926e716 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 672.364946] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e279eedd0a414f37bffbc909d926e716 [ 672.406621] env[62096]: DEBUG oslo_concurrency.lockutils [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Lock "c2a06721-6848-4fc4-89da-5d292853b6e9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 95.644s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 672.407253] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Expecting reply to msg efb03a83ff954770b8252bd229693a64 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 672.416957] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg efb03a83ff954770b8252bd229693a64 [ 672.442204] env[62096]: INFO nova.compute.manager [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1] Took 1.03 seconds to deallocate network for instance. 
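Aside on the inventory dict logged above for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3: the schedulable capacity Placement derives from each resource class follows (total - reserved) * allocation_ratio. The snippet below is an illustrative sketch only (not Nova or Placement code), plugging in the logged values:

    # Illustrative only: effective schedulable capacity implied by the
    # inventory data in the log record above.
    # effective = (total - reserved) * allocation_ratio
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        effective = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: {effective:g} schedulable units")
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400

With those ratios, the host reports 192 schedulable VCPUs against 48 physical ones, while memory and disk are not overcommitted.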
[ 672.443962] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 4e62ac14742a4eb39f9e1baeb1476f90 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 672.477307] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4e62ac14742a4eb39f9e1baeb1476f90 [ 672.507078] env[62096]: DEBUG nova.compute.manager [req-28091b3e-327e-4bf4-806b-07c641f4f4d7 req-b1a75f6c-adb6-4753-8c5d-f81c4abbd221 service nova] [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] Received event network-changed-3cd450aa-2fc5-4d09-a571-2b6c9545419d {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 672.507296] env[62096]: DEBUG nova.compute.manager [req-28091b3e-327e-4bf4-806b-07c641f4f4d7 req-b1a75f6c-adb6-4753-8c5d-f81c4abbd221 service nova] [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] Refreshing instance network info cache due to event network-changed-3cd450aa-2fc5-4d09-a571-2b6c9545419d. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 672.507489] env[62096]: DEBUG oslo_concurrency.lockutils [req-28091b3e-327e-4bf4-806b-07c641f4f4d7 req-b1a75f6c-adb6-4753-8c5d-f81c4abbd221 service nova] Acquiring lock "refresh_cache-641e5b3a-15ef-4d78-8339-7a26494038d9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 672.507629] env[62096]: DEBUG oslo_concurrency.lockutils [req-28091b3e-327e-4bf4-806b-07c641f4f4d7 req-b1a75f6c-adb6-4753-8c5d-f81c4abbd221 service nova] Acquired lock "refresh_cache-641e5b3a-15ef-4d78-8339-7a26494038d9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.507786] env[62096]: DEBUG nova.network.neutron [req-28091b3e-327e-4bf4-806b-07c641f4f4d7 req-b1a75f6c-adb6-4753-8c5d-f81c4abbd221 service nova] [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] Refreshing network info cache for port 3cd450aa-2fc5-4d09-a571-2b6c9545419d {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 672.508231] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-28091b3e-327e-4bf4-806b-07c641f4f4d7 req-b1a75f6c-adb6-4753-8c5d-f81c4abbd221 service nova] Expecting reply to msg d235bd719d9a4772b156c6ed5309ca51 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 672.515326] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d235bd719d9a4772b156c6ed5309ca51 [ 672.844534] env[62096]: INFO nova.compute.manager [-] [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] Took 1.03 seconds to deallocate network for instance. 
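The Acquiring/Acquired/Releasing lock "refresh_cache-<instance uuid>" pairs in the records above come from oslo.concurrency's lockutils. A minimal sketch of that pattern follows; the cache-refresh body and the fetch_nw_info callable are hypothetical stand-ins for illustration, not Nova's implementation:

    # Minimal sketch, assuming only the lock-naming convention seen in the log.
    from oslo_concurrency import lockutils

    def refresh_instance_network_cache(instance_uuid, fetch_nw_info):
        # Serialize concurrent refreshes of one instance's network info cache,
        # mirroring the "refresh_cache-<uuid>" lock name logged above.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            nw_info = fetch_nw_info(instance_uuid)  # e.g. a Neutron port lookup
            return nw_info

    # Usage sketch:
    # refresh_instance_network_cache('641e5b3a-15ef-4d78-8339-7a26494038d9',
    #                                lambda uuid: [])

Holding the per-instance lock for the duration of the refresh is what produces the waited/held timings reported by lockutils in the surrounding records.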
[ 672.846922] env[62096]: DEBUG nova.compute.claims [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 672.847096] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 672.856966] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.544s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 672.857458] env[62096]: DEBUG nova.compute.manager [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 374ca884-8e77-4568-8667-e124e6df4c75] Start building networks asynchronously for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 672.859189] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg 4e58e536637f41c2a78f6070707e648c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 672.860178] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 17.525s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 672.860961] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 21ef92d0c86c445e8af5650e3c0290ee in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 672.892884] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4e58e536637f41c2a78f6070707e648c [ 672.899965] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 21ef92d0c86c445e8af5650e3c0290ee [ 672.909381] env[62096]: DEBUG nova.compute.manager [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] [instance: 7d62e302-8080-4699-b88d-cb29031e6707] Starting instance... 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 672.911016] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Expecting reply to msg dc1f8ced552049c69f127d5a287d5d3a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 672.946490] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dc1f8ced552049c69f127d5a287d5d3a [ 672.949033] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 3b48bcbb183e4411acedf55856c6dcc3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 672.977616] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3b48bcbb183e4411acedf55856c6dcc3 [ 673.025128] env[62096]: DEBUG nova.network.neutron [req-28091b3e-327e-4bf4-806b-07c641f4f4d7 req-b1a75f6c-adb6-4753-8c5d-f81c4abbd221 service nova] [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 673.110616] env[62096]: DEBUG nova.network.neutron [req-28091b3e-327e-4bf4-806b-07c641f4f4d7 req-b1a75f6c-adb6-4753-8c5d-f81c4abbd221 service nova] [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.111158] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-28091b3e-327e-4bf4-806b-07c641f4f4d7 req-b1a75f6c-adb6-4753-8c5d-f81c4abbd221 service nova] Expecting reply to msg 7130599b60094bb0a7ae205dcdba763e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 673.119171] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7130599b60094bb0a7ae205dcdba763e [ 673.364017] env[62096]: DEBUG nova.compute.utils [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 673.364685] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg c9c27b56a85e4c45acba07a7f4f3a7f4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 673.365681] env[62096]: DEBUG nova.compute.manager [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 374ca884-8e77-4568-8667-e124e6df4c75] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 673.365795] env[62096]: DEBUG nova.network.neutron [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 374ca884-8e77-4568-8667-e124e6df4c75] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 673.376940] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg a5507940b5e4422bbc5c077719f8385b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 673.378005] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c9c27b56a85e4c45acba07a7f4f3a7f4 [ 673.387551] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a5507940b5e4422bbc5c077719f8385b [ 673.406677] env[62096]: DEBUG nova.policy [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '480141f764ae4387a7740719160c9ddd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b8386168ae3843e58ae487f59e81fc4f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 673.427319] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 673.477029] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg a3af3472c5c74615bf989a9f8777e545 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 673.495376] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a3af3472c5c74615bf989a9f8777e545 [ 673.613864] env[62096]: DEBUG oslo_concurrency.lockutils [req-28091b3e-327e-4bf4-806b-07c641f4f4d7 req-b1a75f6c-adb6-4753-8c5d-f81c4abbd221 service nova] Releasing lock "refresh_cache-641e5b3a-15ef-4d78-8339-7a26494038d9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 673.613864] env[62096]: DEBUG nova.compute.manager [req-28091b3e-327e-4bf4-806b-07c641f4f4d7 req-b1a75f6c-adb6-4753-8c5d-f81c4abbd221 service nova] [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] Received event network-vif-deleted-3cd450aa-2fc5-4d09-a571-2b6c9545419d {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 673.707738] env[62096]: DEBUG nova.network.neutron [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 374ca884-8e77-4568-8667-e124e6df4c75] Successfully created port: 
7158528a-233c-4f68-8cce-fcc21e6131a0 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 673.869315] env[62096]: DEBUG nova.compute.manager [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 374ca884-8e77-4568-8667-e124e6df4c75] Start building block device mappings for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 673.871175] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg 902adbeeee5740e483c252287169adf4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 673.898237] env[62096]: WARNING nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 06b9105a-8dcb-4bff-bba2-05e179036f24 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 673.898389] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 62b778aa-71c7-480b-8148-017773246caf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 673.898507] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance e5c75298-b9ef-4e28-a038-b55d8e198539 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 673.898631] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 641e5b3a-15ef-4d78-8339-7a26494038d9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 673.898733] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 374ca884-8e77-4568-8667-e124e6df4c75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 673.899311] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg e62c9085467f4e90b478f2cfeec9d4a0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 673.910284] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e62c9085467f4e90b478f2cfeec9d4a0 [ 673.910885] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 902adbeeee5740e483c252287169adf4 [ 673.982261] env[62096]: DEBUG oslo_concurrency.lockutils [None req-780709b4-5943-4dac-ba4d-737b2ce3aa53 tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Lock "f0a3c0b6-47eb-4c92-acf6-efcc30b5eda1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 97.179s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 673.983157] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Expecting reply to msg 8031f06475e74047b35233068da95a5f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 673.995673] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8031f06475e74047b35233068da95a5f [ 674.293045] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Acquiring lock "a5b5afc7-abc0-430a-b682-2c1946b4a6d8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 674.293367] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Lock "a5b5afc7-abc0-430a-b682-2c1946b4a6d8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 674.326544] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Acquiring lock "4ce3f48e-e45c-4628-8c35-8493c655a6f3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 674.326805] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Lock "4ce3f48e-e45c-4628-8c35-8493c655a6f3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 674.376914] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a 
tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg 92bec584dfef424d9f11656d2bc32f06 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 674.379786] env[62096]: DEBUG nova.compute.manager [req-8515d434-f0a2-4bd2-bddd-3c81cba51d14 req-699ac484-bc85-45f9-a4d0-d3266ff2e824 service nova] [instance: 374ca884-8e77-4568-8667-e124e6df4c75] Received event network-changed-7158528a-233c-4f68-8cce-fcc21e6131a0 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 674.380115] env[62096]: DEBUG nova.compute.manager [req-8515d434-f0a2-4bd2-bddd-3c81cba51d14 req-699ac484-bc85-45f9-a4d0-d3266ff2e824 service nova] [instance: 374ca884-8e77-4568-8667-e124e6df4c75] Refreshing instance network info cache due to event network-changed-7158528a-233c-4f68-8cce-fcc21e6131a0. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 674.380492] env[62096]: DEBUG oslo_concurrency.lockutils [req-8515d434-f0a2-4bd2-bddd-3c81cba51d14 req-699ac484-bc85-45f9-a4d0-d3266ff2e824 service nova] Acquiring lock "refresh_cache-374ca884-8e77-4568-8667-e124e6df4c75" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 674.380686] env[62096]: DEBUG oslo_concurrency.lockutils [req-8515d434-f0a2-4bd2-bddd-3c81cba51d14 req-699ac484-bc85-45f9-a4d0-d3266ff2e824 service nova] Acquired lock "refresh_cache-374ca884-8e77-4568-8667-e124e6df4c75" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 674.380888] env[62096]: DEBUG nova.network.neutron [req-8515d434-f0a2-4bd2-bddd-3c81cba51d14 req-699ac484-bc85-45f9-a4d0-d3266ff2e824 service nova] [instance: 374ca884-8e77-4568-8667-e124e6df4c75] Refreshing network info cache for port 7158528a-233c-4f68-8cce-fcc21e6131a0 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 674.381320] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-8515d434-f0a2-4bd2-bddd-3c81cba51d14 req-699ac484-bc85-45f9-a4d0-d3266ff2e824 service nova] Expecting reply to msg 76f28dbdc3dd46099b29d6342244444e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 674.394269] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 76f28dbdc3dd46099b29d6342244444e [ 674.401707] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 575e00ca-0f97-42c5-9e4d-706c21453210 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 674.402255] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 742b6cc6196c4be7a5f194c13523dbf7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 674.413612] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 742b6cc6196c4be7a5f194c13523dbf7 [ 674.417723] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 92bec584dfef424d9f11656d2bc32f06 [ 674.485786] env[62096]: DEBUG nova.compute.manager [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 674.487675] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Expecting reply to msg e6fe214b44cc4a11b931f1ff3c67c6f6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 674.532590] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e6fe214b44cc4a11b931f1ff3c67c6f6 [ 674.560601] env[62096]: ERROR nova.compute.manager [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7158528a-233c-4f68-8cce-fcc21e6131a0, please check neutron logs for more information. 
[ 674.560601] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 674.560601] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 674.560601] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 674.560601] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 674.560601] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 674.560601] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 674.560601] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 674.560601] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 674.560601] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 674.560601] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 674.560601] env[62096]: ERROR nova.compute.manager raise self.value [ 674.560601] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 674.560601] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 674.560601] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 674.560601] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 674.561115] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 674.561115] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 674.561115] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7158528a-233c-4f68-8cce-fcc21e6131a0, please check neutron logs for more information. 
[ 674.561115] env[62096]: ERROR nova.compute.manager [ 674.561115] env[62096]: Traceback (most recent call last): [ 674.561115] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 674.561115] env[62096]: listener.cb(fileno) [ 674.561115] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 674.561115] env[62096]: result = function(*args, **kwargs) [ 674.561115] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 674.561115] env[62096]: return func(*args, **kwargs) [ 674.561115] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 674.561115] env[62096]: raise e [ 674.561115] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 674.561115] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 674.561115] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 674.561115] env[62096]: created_port_ids = self._update_ports_for_instance( [ 674.561115] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 674.561115] env[62096]: with excutils.save_and_reraise_exception(): [ 674.561115] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 674.561115] env[62096]: self.force_reraise() [ 674.561115] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 674.561115] env[62096]: raise self.value [ 674.561115] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 674.561115] env[62096]: updated_port = self._update_port( [ 674.561115] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 674.561115] env[62096]: _ensure_no_port_binding_failure(port) [ 674.561115] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 674.561115] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 674.562066] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 7158528a-233c-4f68-8cce-fcc21e6131a0, please check neutron logs for more information. [ 674.562066] env[62096]: Removing descriptor: 14 [ 674.884632] env[62096]: DEBUG nova.compute.manager [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 374ca884-8e77-4568-8667-e124e6df4c75] Start spawning the instance on the hypervisor. {{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 674.905608] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 674.906056] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 08eb74743c8644cab2ae2d4c0437ae33 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 674.907633] env[62096]: DEBUG nova.network.neutron [req-8515d434-f0a2-4bd2-bddd-3c81cba51d14 req-699ac484-bc85-45f9-a4d0-d3266ff2e824 service nova] [instance: 374ca884-8e77-4568-8667-e124e6df4c75] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 674.919423] env[62096]: DEBUG nova.virt.hardware [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 674.919673] env[62096]: DEBUG nova.virt.hardware [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 674.919818] env[62096]: DEBUG nova.virt.hardware [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 674.919989] env[62096]: DEBUG nova.virt.hardware [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 674.920139] env[62096]: DEBUG nova.virt.hardware [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 674.920276] env[62096]: DEBUG nova.virt.hardware [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 674.920472] env[62096]: DEBUG nova.virt.hardware [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Topology 
preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 674.920622] env[62096]: DEBUG nova.virt.hardware [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 674.920789] env[62096]: DEBUG nova.virt.hardware [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 674.920938] env[62096]: DEBUG nova.virt.hardware [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 674.921108] env[62096]: DEBUG nova.virt.hardware [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 674.922055] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c84d9b5e-d01e-4a79-b9b6-a471f41adf8c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.925664] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 08eb74743c8644cab2ae2d4c0437ae33 [ 674.932021] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-017fbefd-aa91-4b29-923b-553e7887773a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.949558] env[62096]: ERROR nova.compute.manager [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 374ca884-8e77-4568-8667-e124e6df4c75] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7158528a-233c-4f68-8cce-fcc21e6131a0, please check neutron logs for more information. 
[ 674.949558] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] Traceback (most recent call last): [ 674.949558] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 674.949558] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] yield resources [ 674.949558] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 674.949558] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] self.driver.spawn(context, instance, image_meta, [ 674.949558] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 674.949558] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] self._vmops.spawn(context, instance, image_meta, injected_files, [ 674.949558] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 674.949558] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] vm_ref = self.build_virtual_machine(instance, [ 674.949558] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 674.949952] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] vif_infos = vmwarevif.get_vif_info(self._session, [ 674.949952] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 674.949952] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] for vif in network_info: [ 674.949952] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 674.949952] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] return self._sync_wrapper(fn, *args, **kwargs) [ 674.949952] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 674.949952] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] self.wait() [ 674.949952] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 674.949952] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] self[:] = self._gt.wait() [ 674.949952] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 674.949952] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] return self._exit_event.wait() [ 674.949952] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 674.949952] env[62096]: ERROR 
nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] current.throw(*self._exc) [ 674.950298] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 674.950298] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] result = function(*args, **kwargs) [ 674.950298] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 674.950298] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] return func(*args, **kwargs) [ 674.950298] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 674.950298] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] raise e [ 674.950298] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 674.950298] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] nwinfo = self.network_api.allocate_for_instance( [ 674.950298] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 674.950298] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] created_port_ids = self._update_ports_for_instance( [ 674.950298] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 674.950298] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] with excutils.save_and_reraise_exception(): [ 674.950298] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 674.950635] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] self.force_reraise() [ 674.950635] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 674.950635] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] raise self.value [ 674.950635] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 674.950635] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] updated_port = self._update_port( [ 674.950635] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 674.950635] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] _ensure_no_port_binding_failure(port) [ 674.950635] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
674.950635] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] raise exception.PortBindingFailed(port_id=port['id']) [ 674.950635] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] nova.exception.PortBindingFailed: Binding failed for port 7158528a-233c-4f68-8cce-fcc21e6131a0, please check neutron logs for more information. [ 674.950635] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] [ 674.961064] env[62096]: INFO nova.compute.manager [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 374ca884-8e77-4568-8667-e124e6df4c75] Terminating instance [ 674.965721] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Acquiring lock "refresh_cache-374ca884-8e77-4568-8667-e124e6df4c75" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 675.007271] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 675.010991] env[62096]: DEBUG nova.network.neutron [req-8515d434-f0a2-4bd2-bddd-3c81cba51d14 req-699ac484-bc85-45f9-a4d0-d3266ff2e824 service nova] [instance: 374ca884-8e77-4568-8667-e124e6df4c75] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 675.011514] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-8515d434-f0a2-4bd2-bddd-3c81cba51d14 req-699ac484-bc85-45f9-a4d0-d3266ff2e824 service nova] Expecting reply to msg 7c70bde88c2447ea86972724115937bc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 675.020203] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c70bde88c2447ea86972724115937bc [ 675.412736] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 675.413358] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 3784d6ca303b437f865dfb4022f32110 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 675.423308] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3784d6ca303b437f865dfb4022f32110 [ 675.513486] env[62096]: DEBUG oslo_concurrency.lockutils [req-8515d434-f0a2-4bd2-bddd-3c81cba51d14 req-699ac484-bc85-45f9-a4d0-d3266ff2e824 service nova] Releasing lock "refresh_cache-374ca884-8e77-4568-8667-e124e6df4c75" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 675.513923] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Acquired lock "refresh_cache-374ca884-8e77-4568-8667-e124e6df4c75" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 675.514092] env[62096]: DEBUG nova.network.neutron [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 374ca884-8e77-4568-8667-e124e6df4c75] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 675.514528] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg 9971f7c3dc734be6aa8f729749925d44 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 675.521264] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9971f7c3dc734be6aa8f729749925d44 [ 675.916795] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance ac8746f4-95b0-440e-bc3e-a92457ed664f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 675.917358] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg f67a4d42a2234da0b3a757614b8c4c3f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 675.927567] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f67a4d42a2234da0b3a757614b8c4c3f [ 676.031819] env[62096]: DEBUG nova.network.neutron [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 374ca884-8e77-4568-8667-e124e6df4c75] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 676.109443] env[62096]: DEBUG nova.network.neutron [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 374ca884-8e77-4568-8667-e124e6df4c75] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 676.110132] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg 42f5f7b53aba49baadb6fb629a4dd192 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 676.118014] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 42f5f7b53aba49baadb6fb629a4dd192 [ 676.407885] env[62096]: DEBUG nova.compute.manager [req-036627f2-e532-4fc8-a4a0-f608b62cc650 req-c6e4d4ab-0ce1-4570-bb79-68d4cd366ced service nova] [instance: 374ca884-8e77-4568-8667-e124e6df4c75] Received event network-vif-deleted-7158528a-233c-4f68-8cce-fcc21e6131a0 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 676.419837] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 7d62e302-8080-4699-b88d-cb29031e6707 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 676.420539] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg ae74c86b8f4f47908c9e8ea655b884fd in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 676.430733] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ae74c86b8f4f47908c9e8ea655b884fd [ 676.613634] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Releasing lock "refresh_cache-374ca884-8e77-4568-8667-e124e6df4c75" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 676.614099] env[62096]: DEBUG nova.compute.manager [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 374ca884-8e77-4568-8667-e124e6df4c75] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 676.614295] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 374ca884-8e77-4568-8667-e124e6df4c75] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 676.614604] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2c25774a-f918-4af6-96c9-1fddd6a58874 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.623314] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa5240cd-a593-4610-93ca-0283cf526dd4 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.644205] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 374ca884-8e77-4568-8667-e124e6df4c75] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 374ca884-8e77-4568-8667-e124e6df4c75 could not be found. [ 676.644426] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 374ca884-8e77-4568-8667-e124e6df4c75] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 676.644605] env[62096]: INFO nova.compute.manager [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 374ca884-8e77-4568-8667-e124e6df4c75] Took 0.03 seconds to destroy the instance on the hypervisor. [ 676.644837] env[62096]: DEBUG oslo.service.loopingcall [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 676.645076] env[62096]: DEBUG nova.compute.manager [-] [instance: 374ca884-8e77-4568-8667-e124e6df4c75] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 676.645170] env[62096]: DEBUG nova.network.neutron [-] [instance: 374ca884-8e77-4568-8667-e124e6df4c75] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 676.660072] env[62096]: DEBUG nova.network.neutron [-] [instance: 374ca884-8e77-4568-8667-e124e6df4c75] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 676.660559] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg b4ebcf15c34b46728f23ccdc57b5817e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 676.669176] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4ebcf15c34b46728f23ccdc57b5817e [ 676.923104] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 3480c79b-58e4-4759-acd4-b2f45f22da54 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 676.923699] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg b311457b2ad0415abc2833a3a88ef3d9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 676.939924] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b311457b2ad0415abc2833a3a88ef3d9 [ 677.162283] env[62096]: DEBUG nova.network.neutron [-] [instance: 374ca884-8e77-4568-8667-e124e6df4c75] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 677.162771] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg b581ebf5d175428a942c642beaee5a50 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 677.172409] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b581ebf5d175428a942c642beaee5a50 [ 677.426693] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 677.427268] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 22a8856ca5ac4bbf8497844d412e1508 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 677.436830] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 22a8856ca5ac4bbf8497844d412e1508 [ 677.664762] env[62096]: INFO nova.compute.manager [-] [instance: 374ca884-8e77-4568-8667-e124e6df4c75] Took 1.02 seconds to deallocate network for instance. 
[ 677.667102] env[62096]: DEBUG nova.compute.claims [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 374ca884-8e77-4568-8667-e124e6df4c75] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 677.667289] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 677.930016] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 03570c3d-3ca5-495d-8a52-2f86b280f667 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 677.930611] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 01aa62d2847943e39a12d7d4c6b5afa3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 677.946575] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 01aa62d2847943e39a12d7d4c6b5afa3 [ 678.433462] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance d38be540-9cd0-428a-b10d-313d2d464b25 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 678.434073] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg a986c6013e984e7187cf5a8fd0b6002a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 678.444371] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a986c6013e984e7187cf5a8fd0b6002a [ 678.938077] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 58df043b-ab2f-4e78-8bba-084fe53d3d8e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 678.938077] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg f3c35140c28649368e1b650bafe67c57 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 678.947668] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f3c35140c28649368e1b650bafe67c57 [ 679.440380] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 321ab95b-6221-4bab-b442-a90926098dae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 679.441064] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 1830c6d564444523a7152357d1325928 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 679.451121] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1830c6d564444523a7152357d1325928 [ 679.943007] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 59b37648-7b23-4ae3-90e6-867fbbde25df has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 679.943592] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg a874cf520222421e871337845939d47c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 679.953641] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a874cf520222421e871337845939d47c [ 680.446543] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 680.447377] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg a363a93ab199413090e553044077c791 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 680.461200] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a363a93ab199413090e553044077c791 [ 680.951021] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance dafff089-f6e4-4269-bf0e-ea305c11ff36 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 680.951618] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 4b471c12f6804e338e227c83250c32d4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 680.962151] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4b471c12f6804e338e227c83250c32d4 [ 681.454901] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 7bfac3e2-f06c-4690-9215-a5f67a67c5bd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 681.455519] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg b6c12cd6b92f428391853687d8f240a9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 681.466507] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b6c12cd6b92f428391853687d8f240a9 [ 681.958616] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 681.959201] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 02e99f635d2e4d599525f6d2bbc69497 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 681.970357] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 02e99f635d2e4d599525f6d2bbc69497 [ 682.462507] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 510189aa-204c-4fd6-90d5-47a7ce5f7630 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 682.462968] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg cdc787e7c0714a4090836f8945535ee3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 682.498547] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cdc787e7c0714a4090836f8945535ee3 [ 682.966017] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance de88f8a1-20a5-49f9-adcb-de48aeaa548a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 682.966017] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg e31d5120ea9f4fecb7dce6720b7932e7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 682.976170] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e31d5120ea9f4fecb7dce6720b7932e7 [ 683.468201] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance d7bd302c-1d85-45c2-9a3e-9855a6488d92 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 683.468760] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 1c69a7b64f8a44a199e5f86a6e3ce632 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 683.480014] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1c69a7b64f8a44a199e5f86a6e3ce632 [ 683.971016] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance cffc0ebc-4fb1-47c9-8882-b8431046ef2f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 683.971589] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 1b3e6d00f77944a1a7229dc150e3c428 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 683.981580] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1b3e6d00f77944a1a7229dc150e3c428 [ 684.476509] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance e8631ec4-1823-46d2-8553-05e3336fed32 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 684.476509] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 483dd8e1eec84792924808040a945824 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 684.486307] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 483dd8e1eec84792924808040a945824 [ 684.980469] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance f050f0de-dc84-4825-b490-eafe522354cd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 684.980469] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 31aa338b15ba40708ccf27d330ee4699 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 684.990987] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 31aa338b15ba40708ccf27d330ee4699 [ 685.484703] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 3283ae52-1a1f-4dec-91f3-44cc42361bb5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 685.484703] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 665491de0ea2450e82cd7b68db40c6d1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 685.494772] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 665491de0ea2450e82cd7b68db40c6d1 [ 685.988277] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance c928bf83-9517-449a-854c-6f3d8ce4faa0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 685.988277] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 08b8475d30f343f0a7e54935cc86d2e0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 685.998782] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 08b8475d30f343f0a7e54935cc86d2e0 [ 686.490450] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance a5b5afc7-abc0-430a-b682-2c1946b4a6d8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 686.491088] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg bea594449f1346b2af950738ae046b43 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 686.502182] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bea594449f1346b2af950738ae046b43 [ 686.993822] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 4ce3f48e-e45c-4628-8c35-8493c655a6f3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 686.993988] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=62096) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 686.994122] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=62096) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 687.337361] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fb8d092-2170-4624-82d7-7a318e749df3 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.344895] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-156ed292-1af7-446a-968e-11347652641e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.372535] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-987896a9-4626-4dc7-a2c6-68da3043b6fb {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.378807] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56589181-04a4-443d-85e9-5f75af7e6f7b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.391072] env[62096]: DEBUG nova.compute.provider_tree [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 687.391508] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg fdb1f5e4ef2f4ddcbc8c489eb45289d7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 687.399065] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fdb1f5e4ef2f4ddcbc8c489eb45289d7 [ 687.894589] env[62096]: DEBUG nova.scheduler.client.report [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 687.896642] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 7642778d092441118e2af28d34ac75bc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 687.910506] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7642778d092441118e2af28d34ac75bc [ 688.399073] 
env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62096) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 688.399330] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 15.539s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 688.399616] env[62096]: DEBUG oslo_concurrency.lockutils [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.397s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 688.401199] env[62096]: INFO nova.compute.claims [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 688.402827] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg 7a9a1c130d15410eae6de22bf87cddd2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 688.442022] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7a9a1c130d15410eae6de22bf87cddd2 [ 688.907046] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg 61d6d322b1e94f43b5d974ec1243ef09 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 688.914792] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 61d6d322b1e94f43b5d974ec1243ef09 [ 689.727486] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb56a1d2-911c-493f-acf3-371ac5b235b7 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.734526] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fb28a76-4bea-4a3e-9fb7-61e96b24a576 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.764019] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-115821a1-f39c-44ba-ba77-61d4cd3714b6 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.770697] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39e5c0a5-2ad4-455a-9fb0-bfe54d5f8e83 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.782995] env[62096]: DEBUG nova.compute.provider_tree [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 
tempest-AttachVolumeNegativeTest-1077674641-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 689.783469] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg 7ccaf65d72d24ed49e800f02fb917b08 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 689.790937] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ccaf65d72d24ed49e800f02fb917b08 [ 690.286639] env[62096]: DEBUG nova.scheduler.client.report [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 690.289207] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg 15cf671faab14687bb4ca0c4b12fbc77 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 690.300057] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 15cf671faab14687bb4ca0c4b12fbc77 [ 690.791970] env[62096]: DEBUG oslo_concurrency.lockutils [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.392s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 690.792552] env[62096]: DEBUG nova.compute.manager [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 690.794388] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg b83828ce13754184ac1c1993cbd0d802 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 690.795565] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 30.245s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 690.797197] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Expecting reply to msg f6ecff6e0a14499d99e24bcde386d620 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 690.823855] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b83828ce13754184ac1c1993cbd0d802 [ 690.829768] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f6ecff6e0a14499d99e24bcde386d620 [ 691.300495] env[62096]: DEBUG nova.compute.utils [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 691.301255] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg 8b30ad335b8c4fc8b7227e5d2bbc6f20 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 691.305208] env[62096]: DEBUG nova.compute.manager [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 691.305384] env[62096]: DEBUG nova.network.neutron [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 691.312769] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8b30ad335b8c4fc8b7227e5d2bbc6f20 [ 691.358912] env[62096]: DEBUG nova.policy [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6f956b78c87f49c29ac1d804316f1896', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '205e7b01639c499baadc35fb26fba6ff', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 691.702856] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-981a3f43-7cfa-4f99-bdcb-313cc97f3b73 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.706803] env[62096]: DEBUG nova.network.neutron [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] Successfully created port: da3e6abf-b3f7-4739-b243-642f489448db {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 691.713601] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-956470c2-9827-486e-a678-68450f15dc0f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.742748] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5ff1c67-bc88-41ae-9b2d-6f93d63f9b64 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.750418] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70a2e207-06e1-484c-9de4-57cf46ea5c72 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.765140] env[62096]: DEBUG nova.compute.provider_tree [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 691.765654] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Expecting reply to msg 82649598d1b34d84acccda3dd928997e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 691.773888] env[62096]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 82649598d1b34d84acccda3dd928997e [ 691.806770] env[62096]: DEBUG nova.compute.manager [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] Start building block device mappings for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 691.809038] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg c3f3fa5eb6d04a6dbfec2c33a922a271 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 691.839911] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c3f3fa5eb6d04a6dbfec2c33a922a271 [ 692.271733] env[62096]: DEBUG nova.scheduler.client.report [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 692.271733] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Expecting reply to msg 2022e9aef7f9432c9b31983c4f8bc888 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 692.282707] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2022e9aef7f9432c9b31983c4f8bc888 [ 692.316337] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg a54b1056efb94385a0698fae167e8a83 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 692.355452] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a54b1056efb94385a0698fae167e8a83 [ 692.448487] env[62096]: DEBUG nova.compute.manager [req-3290ba01-eb43-4293-9ce0-b86e39527165 req-b3c7acd7-fe6a-4325-afc0-92d16fb75df9 service nova] [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] Received event network-changed-da3e6abf-b3f7-4739-b243-642f489448db {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 692.448487] env[62096]: DEBUG nova.compute.manager [req-3290ba01-eb43-4293-9ce0-b86e39527165 req-b3c7acd7-fe6a-4325-afc0-92d16fb75df9 service nova] [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] Refreshing instance network info cache due to event network-changed-da3e6abf-b3f7-4739-b243-642f489448db. 
{{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 692.448487] env[62096]: DEBUG oslo_concurrency.lockutils [req-3290ba01-eb43-4293-9ce0-b86e39527165 req-b3c7acd7-fe6a-4325-afc0-92d16fb75df9 service nova] Acquiring lock "refresh_cache-575e00ca-0f97-42c5-9e4d-706c21453210" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 692.448487] env[62096]: DEBUG oslo_concurrency.lockutils [req-3290ba01-eb43-4293-9ce0-b86e39527165 req-b3c7acd7-fe6a-4325-afc0-92d16fb75df9 service nova] Acquired lock "refresh_cache-575e00ca-0f97-42c5-9e4d-706c21453210" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.448487] env[62096]: DEBUG nova.network.neutron [req-3290ba01-eb43-4293-9ce0-b86e39527165 req-b3c7acd7-fe6a-4325-afc0-92d16fb75df9 service nova] [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] Refreshing network info cache for port da3e6abf-b3f7-4739-b243-642f489448db {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 692.448916] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-3290ba01-eb43-4293-9ce0-b86e39527165 req-b3c7acd7-fe6a-4325-afc0-92d16fb75df9 service nova] Expecting reply to msg 550d37cba6314c0bab2e014415c7af04 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 692.455750] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 550d37cba6314c0bab2e014415c7af04 [ 692.695720] env[62096]: ERROR nova.compute.manager [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port da3e6abf-b3f7-4739-b243-642f489448db, please check neutron logs for more information. 
[ 692.695720] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 692.695720] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 692.695720] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 692.695720] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 692.695720] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 692.695720] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 692.695720] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 692.695720] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 692.695720] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 692.695720] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 692.695720] env[62096]: ERROR nova.compute.manager raise self.value [ 692.695720] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 692.695720] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 692.695720] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 692.695720] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 692.696294] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 692.696294] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 692.696294] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port da3e6abf-b3f7-4739-b243-642f489448db, please check neutron logs for more information. 
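The traceback above (and the greenthread copy of it that follows immediately below) bottoms out in `_ensure_no_port_binding_failure(port)` raising `PortBindingFailed` for port da3e6abf-b3f7-4739-b243-642f489448db. As a minimal sketch of what that kind of guard does, not the actual nova/network/neutron.py code: the dict key and the 'binding_failed' sentinel are assumed from the standard Neutron port binding attributes, and the exception class here is a stand-in for nova.exception.PortBindingFailed.

```python
# Sketch only: a Neutron-style port dict is assumed to carry the standard
# 'binding:vif_type' attribute, which Neutron sets to 'binding_failed'
# when it could not bind the port to any host.

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")


def ensure_no_port_binding_failure(port):
    # Treat an unbindable port as fatal; this is the failure path that
    # surfaces as the PortBindingFailed errors in the records above.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


try:
    ensure_no_port_binding_failure(
        {'id': 'da3e6abf-b3f7-4739-b243-642f489448db',
         'binding:vif_type': 'binding_failed'})
except PortBindingFailed as exc:
    print(exc)  # mirrors the message logged at 692.695720
```

When binding fails like this, the compute manager aborts the resource claim and re-schedules the build, which is what the later "Build of instance ... was re-scheduled: Binding failed for port ..." record in this section shows.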
[ 692.696294] env[62096]: ERROR nova.compute.manager [ 692.696294] env[62096]: Traceback (most recent call last): [ 692.696294] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 692.696294] env[62096]: listener.cb(fileno) [ 692.696294] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 692.696294] env[62096]: result = function(*args, **kwargs) [ 692.696294] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 692.696294] env[62096]: return func(*args, **kwargs) [ 692.696294] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 692.696294] env[62096]: raise e [ 692.696294] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 692.696294] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 692.696294] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 692.696294] env[62096]: created_port_ids = self._update_ports_for_instance( [ 692.696294] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 692.696294] env[62096]: with excutils.save_and_reraise_exception(): [ 692.696294] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 692.696294] env[62096]: self.force_reraise() [ 692.696294] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 692.696294] env[62096]: raise self.value [ 692.696294] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 692.696294] env[62096]: updated_port = self._update_port( [ 692.696294] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 692.696294] env[62096]: _ensure_no_port_binding_failure(port) [ 692.696294] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 692.696294] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 692.697184] env[62096]: nova.exception.PortBindingFailed: Binding failed for port da3e6abf-b3f7-4739-b243-642f489448db, please check neutron logs for more information. [ 692.697184] env[62096]: Removing descriptor: 14 [ 692.778955] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.979s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 692.778955] env[62096]: ERROR nova.compute.manager [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] [instance: 62b778aa-71c7-480b-8148-017773246caf] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port a72a7432-c15c-4535-bfe3-9b96b41ec2ce, please check neutron logs for more information. 
[ 692.778955] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] Traceback (most recent call last): [ 692.778955] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 692.778955] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] self.driver.spawn(context, instance, image_meta, [ 692.778955] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 692.778955] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 692.778955] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 692.778955] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] vm_ref = self.build_virtual_machine(instance, [ 692.779310] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 692.779310] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] vif_infos = vmwarevif.get_vif_info(self._session, [ 692.779310] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 692.779310] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] for vif in network_info: [ 692.779310] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 692.779310] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] return self._sync_wrapper(fn, *args, **kwargs) [ 692.779310] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 692.779310] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] self.wait() [ 692.779310] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 692.779310] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] self[:] = self._gt.wait() [ 692.779310] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 692.779310] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] return self._exit_event.wait() [ 692.779310] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 692.779658] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] result = hub.switch() [ 692.779658] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
692.779658] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] return self.greenlet.switch() [ 692.779658] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 692.779658] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] result = function(*args, **kwargs) [ 692.779658] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 692.779658] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] return func(*args, **kwargs) [ 692.779658] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 692.779658] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] raise e [ 692.779658] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 692.779658] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] nwinfo = self.network_api.allocate_for_instance( [ 692.779658] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 692.779658] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] created_port_ids = self._update_ports_for_instance( [ 692.780023] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 692.780023] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] with excutils.save_and_reraise_exception(): [ 692.780023] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 692.780023] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] self.force_reraise() [ 692.780023] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 692.780023] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] raise self.value [ 692.780023] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 692.780023] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] updated_port = self._update_port( [ 692.780023] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 692.780023] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] _ensure_no_port_binding_failure(port) [ 692.780023] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 692.780023] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] raise exception.PortBindingFailed(port_id=port['id']) [ 692.780409] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] nova.exception.PortBindingFailed: Binding failed for port a72a7432-c15c-4535-bfe3-9b96b41ec2ce, please check neutron logs for more information. [ 692.780409] env[62096]: ERROR nova.compute.manager [instance: 62b778aa-71c7-480b-8148-017773246caf] [ 692.780409] env[62096]: DEBUG nova.compute.utils [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] [instance: 62b778aa-71c7-480b-8148-017773246caf] Binding failed for port a72a7432-c15c-4535-bfe3-9b96b41ec2ce, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 692.780409] env[62096]: DEBUG oslo_concurrency.lockutils [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.711s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 692.780409] env[62096]: INFO nova.compute.claims [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 692.782307] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Expecting reply to msg ce32e8d6d6fb419a830f58bbb64c98b0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 692.783113] env[62096]: DEBUG nova.compute.manager [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] [instance: 62b778aa-71c7-480b-8148-017773246caf] Build of instance 62b778aa-71c7-480b-8148-017773246caf was re-scheduled: Binding failed for port a72a7432-c15c-4535-bfe3-9b96b41ec2ce, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 692.783614] env[62096]: DEBUG nova.compute.manager [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] [instance: 62b778aa-71c7-480b-8148-017773246caf] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 692.783926] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Acquiring lock "refresh_cache-62b778aa-71c7-480b-8148-017773246caf" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 692.784147] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Acquired lock "refresh_cache-62b778aa-71c7-480b-8148-017773246caf" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.784371] env[62096]: DEBUG nova.network.neutron [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] [instance: 62b778aa-71c7-480b-8148-017773246caf] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 692.784782] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Expecting reply to msg 7aa6464a38aa438dbac9ff3a82c421ee in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 692.791119] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7aa6464a38aa438dbac9ff3a82c421ee [ 692.815869] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ce32e8d6d6fb419a830f58bbb64c98b0 [ 692.817141] env[62096]: DEBUG nova.compute.manager [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 692.842519] env[62096]: DEBUG nova.virt.hardware [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 692.842759] env[62096]: DEBUG nova.virt.hardware [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 692.842904] env[62096]: DEBUG nova.virt.hardware [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 692.843086] env[62096]: DEBUG nova.virt.hardware [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 692.843231] env[62096]: DEBUG nova.virt.hardware [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 692.843390] env[62096]: DEBUG nova.virt.hardware [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 692.843595] env[62096]: DEBUG nova.virt.hardware [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 692.843752] env[62096]: DEBUG nova.virt.hardware [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 692.843935] 
env[62096]: DEBUG nova.virt.hardware [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 692.844125] env[62096]: DEBUG nova.virt.hardware [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 692.844300] env[62096]: DEBUG nova.virt.hardware [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 692.845342] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7246fa47-fcde-4e6e-957f-f2411d2b230c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.853364] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9543755c-3df2-4558-a27e-d7f38b8e73a8 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.866558] env[62096]: ERROR nova.compute.manager [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port da3e6abf-b3f7-4739-b243-642f489448db, please check neutron logs for more information. 
[ 692.866558] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] Traceback (most recent call last): [ 692.866558] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 692.866558] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] yield resources [ 692.866558] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 692.866558] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] self.driver.spawn(context, instance, image_meta, [ 692.866558] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 692.866558] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] self._vmops.spawn(context, instance, image_meta, injected_files, [ 692.866558] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 692.866558] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] vm_ref = self.build_virtual_machine(instance, [ 692.866558] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 692.867206] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] vif_infos = vmwarevif.get_vif_info(self._session, [ 692.867206] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 692.867206] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] for vif in network_info: [ 692.867206] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 692.867206] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] return self._sync_wrapper(fn, *args, **kwargs) [ 692.867206] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 692.867206] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] self.wait() [ 692.867206] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 692.867206] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] self[:] = self._gt.wait() [ 692.867206] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 692.867206] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] return self._exit_event.wait() [ 692.867206] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 692.867206] env[62096]: ERROR 
nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] current.throw(*self._exc) [ 692.867807] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 692.867807] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] result = function(*args, **kwargs) [ 692.867807] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 692.867807] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] return func(*args, **kwargs) [ 692.867807] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 692.867807] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] raise e [ 692.867807] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 692.867807] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] nwinfo = self.network_api.allocate_for_instance( [ 692.867807] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 692.867807] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] created_port_ids = self._update_ports_for_instance( [ 692.867807] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 692.867807] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] with excutils.save_and_reraise_exception(): [ 692.867807] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 692.868541] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] self.force_reraise() [ 692.868541] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 692.868541] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] raise self.value [ 692.868541] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 692.868541] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] updated_port = self._update_port( [ 692.868541] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 692.868541] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] _ensure_no_port_binding_failure(port) [ 692.868541] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
692.868541] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] raise exception.PortBindingFailed(port_id=port['id']) [ 692.868541] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] nova.exception.PortBindingFailed: Binding failed for port da3e6abf-b3f7-4739-b243-642f489448db, please check neutron logs for more information. [ 692.868541] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] [ 692.868541] env[62096]: INFO nova.compute.manager [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] Terminating instance [ 692.869150] env[62096]: DEBUG oslo_concurrency.lockutils [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Acquiring lock "refresh_cache-575e00ca-0f97-42c5-9e4d-706c21453210" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 692.965135] env[62096]: DEBUG nova.network.neutron [req-3290ba01-eb43-4293-9ce0-b86e39527165 req-b3c7acd7-fe6a-4325-afc0-92d16fb75df9 service nova] [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 693.042042] env[62096]: DEBUG nova.network.neutron [req-3290ba01-eb43-4293-9ce0-b86e39527165 req-b3c7acd7-fe6a-4325-afc0-92d16fb75df9 service nova] [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 693.042711] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-3290ba01-eb43-4293-9ce0-b86e39527165 req-b3c7acd7-fe6a-4325-afc0-92d16fb75df9 service nova] Expecting reply to msg 60081fbec2e44147b71a7e07f608e332 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 693.052647] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 60081fbec2e44147b71a7e07f608e332 [ 693.286019] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Expecting reply to msg fa9c26b5085c4b709ddf13c822f9fed3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 693.294626] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fa9c26b5085c4b709ddf13c822f9fed3 [ 693.303650] env[62096]: DEBUG nova.network.neutron [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] [instance: 62b778aa-71c7-480b-8148-017773246caf] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 693.351032] env[62096]: DEBUG nova.network.neutron [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] [instance: 62b778aa-71c7-480b-8148-017773246caf] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 693.351583] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Expecting reply to msg 077dbc95a0034c1ba941bd4c5192902c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 693.360029] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 077dbc95a0034c1ba941bd4c5192902c [ 693.545020] env[62096]: DEBUG oslo_concurrency.lockutils [req-3290ba01-eb43-4293-9ce0-b86e39527165 req-b3c7acd7-fe6a-4325-afc0-92d16fb75df9 service nova] Releasing lock "refresh_cache-575e00ca-0f97-42c5-9e4d-706c21453210" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 693.545456] env[62096]: DEBUG oslo_concurrency.lockutils [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Acquired lock "refresh_cache-575e00ca-0f97-42c5-9e4d-706c21453210" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.545641] env[62096]: DEBUG nova.network.neutron [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 693.546200] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg 6f9b95d7da3c41d9bd3a414071fff792 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 693.552681] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6f9b95d7da3c41d9bd3a414071fff792 [ 693.853500] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Releasing lock "refresh_cache-62b778aa-71c7-480b-8148-017773246caf" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 693.853793] env[62096]: DEBUG nova.compute.manager [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 693.853988] env[62096]: DEBUG nova.compute.manager [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] [instance: 62b778aa-71c7-480b-8148-017773246caf] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 693.854166] env[62096]: DEBUG nova.network.neutron [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] [instance: 62b778aa-71c7-480b-8148-017773246caf] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 693.872205] env[62096]: DEBUG nova.network.neutron [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] [instance: 62b778aa-71c7-480b-8148-017773246caf] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 693.872756] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Expecting reply to msg 202e70fdc4c94754849f761c67b64577 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 693.880220] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 202e70fdc4c94754849f761c67b64577 [ 694.062214] env[62096]: DEBUG nova.network.neutron [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 694.133760] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-533bba58-db2e-4ad1-acad-b77230264435 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.137881] env[62096]: DEBUG nova.network.neutron [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 694.138409] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg 0a7988db017143d48eeafdecd54e1fa3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 694.144455] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b647770-de10-42e9-ad08-93840f749f20 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.148335] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0a7988db017143d48eeafdecd54e1fa3 [ 694.176951] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd3f2bd7-2cfe-4e4e-a27b-75ee9b1f71ac {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.185278] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f1572b7-e911-4454-a0d0-099b387aa39b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.198243] env[62096]: DEBUG nova.compute.provider_tree [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 694.198729] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Expecting reply to msg 0fcf7d7416ee44d0ab2bec0469758787 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 694.205509] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0fcf7d7416ee44d0ab2bec0469758787 [ 694.374505] env[62096]: DEBUG nova.network.neutron [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] [instance: 62b778aa-71c7-480b-8148-017773246caf] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 694.375092] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Expecting reply to msg 85009a1ff0a441cab68ec946323992ff in 
queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 694.383453] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 85009a1ff0a441cab68ec946323992ff [ 694.495260] env[62096]: DEBUG nova.compute.manager [req-91603e20-e2da-4eb1-b2cf-78fa66eed46d req-8e051dd6-a437-4c6c-896f-f5ca2e0497a7 service nova] [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] Received event network-vif-deleted-da3e6abf-b3f7-4739-b243-642f489448db {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 694.640625] env[62096]: DEBUG oslo_concurrency.lockutils [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Releasing lock "refresh_cache-575e00ca-0f97-42c5-9e4d-706c21453210" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 694.641053] env[62096]: DEBUG nova.compute.manager [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 694.641250] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 694.641536] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-858f3815-956c-4b2c-b81c-b30fb14f8502 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.650360] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b12e8184-00b7-40b3-9809-7cd497f4c6da {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.672899] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 575e00ca-0f97-42c5-9e4d-706c21453210 could not be found. [ 694.674346] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 694.674346] env[62096]: INFO nova.compute.manager [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] Took 0.03 seconds to destroy the instance on the hypervisor. 
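Editor's note: the traceback above bottoms out in nova/network/neutron.py (_ensure_no_port_binding_failure, line 294 per the trace), which converts a failed Neutron port binding into PortBindingFailed so the spawn is aborted and the instance torn down, as the entries that follow show. The sketch below is a minimal, self-contained approximation of that check: the exception class is reduced to a plain Exception, and the 'binding:vif_type' == 'binding_failed' test is an assumption about how Neutron marks a failed binding, not a copy of the real module.

```python
class PortBindingFailed(Exception):
    """Reduced stand-in for nova.exception.PortBindingFailed."""

    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs "
            "for more information." % port_id)


def ensure_no_port_binding_failure(port):
    # Assumption: Neutron flags a port whose binding could not be completed
    # with the special vif_type 'binding_failed'. Raising here makes
    # _build_and_run_instance abort, after which the claim is dropped and
    # the instance cleaned up, matching the log entries above.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port['id'])


# The port from the traceback, as it might look after a failed binding.
port = {'id': 'da3e6abf-b3f7-4739-b243-642f489448db',
        'binding:vif_type': 'binding_failed'}
try:
    ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)
```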
[ 694.674346] env[62096]: DEBUG oslo.service.loopingcall [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 694.674346] env[62096]: DEBUG nova.compute.manager [-] [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 694.674346] env[62096]: DEBUG nova.network.neutron [-] [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 694.690005] env[62096]: DEBUG nova.network.neutron [-] [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 694.690479] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 5ac3749d9ea148fbb4637804e91f2df0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 694.697182] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ac3749d9ea148fbb4637804e91f2df0 [ 694.701643] env[62096]: DEBUG nova.scheduler.client.report [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 694.704968] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Expecting reply to msg 3445bbc3588943b9a91ee71f0273a79c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 694.716664] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3445bbc3588943b9a91ee71f0273a79c [ 694.877598] env[62096]: INFO nova.compute.manager [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] [instance: 62b778aa-71c7-480b-8148-017773246caf] Took 1.02 seconds to deallocate network for instance. 
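Editor's note: the "Inventory has not changed for provider ... based on inventory data: {...}" entries above reflect the report client comparing the freshly computed VCPU/MEMORY_MB/DISK_GB inventory against its cached copy in the ProviderTree and skipping the Placement update when nothing differs. The sketch below illustrates that comparison using the exact figures from the log; update_inventory and the cache dict are simplified stand-ins, not the real nova.scheduler.client.report API.

```python
# Inventory as reported for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3
# (values copied from the log entry above).
reported = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126,
                'step_size': 1, 'allocation_ratio': 1.0},
}


def update_inventory(provider_uuid, new_inventory, cache):
    """Only push inventory to Placement when it differs from the cache."""
    if cache.get(provider_uuid) == new_inventory:
        print("Inventory has not changed for provider %s" % provider_uuid)
        return False  # nothing to send
    cache[provider_uuid] = new_inventory
    print("Updating inventory for provider %s" % provider_uuid)
    return True  # would translate into a PUT to the Placement API


cache = {'6eefe13c-ab55-4c03-987f-47a62756c3b3': dict(reported)}
update_inventory('6eefe13c-ab55-4c03-987f-47a62756c3b3', reported, cache)
```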
[ 694.879264] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Expecting reply to msg 83f46fe89ad248108f0795d92c97922c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 694.915142] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 83f46fe89ad248108f0795d92c97922c [ 695.192576] env[62096]: DEBUG nova.network.neutron [-] [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 695.193014] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg a770b572f550404cb37f136fb416565e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 695.201257] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a770b572f550404cb37f136fb416565e [ 695.206870] env[62096]: DEBUG oslo_concurrency.lockutils [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.429s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 695.207367] env[62096]: DEBUG nova.compute.manager [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] Start building networks asynchronously for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 695.209056] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Expecting reply to msg ed8a191fab914548bbf37756015e120e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 695.209964] env[62096]: DEBUG oslo_concurrency.lockutils [None req-70f080d3-692d-46fb-a741-1ba8c3b3362d tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.118s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 695.210142] env[62096]: DEBUG oslo_concurrency.lockutils [None req-70f080d3-692d-46fb-a741-1ba8c3b3362d tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 695.212847] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.987s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 695.215794] env[62096]: INFO nova.compute.claims [None 
req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 695.216464] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Expecting reply to msg 971a570bab584a7dbced88dd71069944 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 695.236000] env[62096]: INFO nova.scheduler.client.report [None req-70f080d3-692d-46fb-a741-1ba8c3b3362d tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Deleted allocations for instance 06b9105a-8dcb-4bff-bba2-05e179036f24 [ 695.241867] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-70f080d3-692d-46fb-a741-1ba8c3b3362d tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Expecting reply to msg 314f98b21fee47e5a097a53b42f202ab in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 695.251700] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 971a570bab584a7dbced88dd71069944 [ 695.252094] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ed8a191fab914548bbf37756015e120e [ 695.282314] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 314f98b21fee47e5a097a53b42f202ab [ 695.384208] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Expecting reply to msg 9574f04b60934fe49a07b09c3f5f2d07 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 695.418091] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9574f04b60934fe49a07b09c3f5f2d07 [ 695.695213] env[62096]: INFO nova.compute.manager [-] [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] Took 1.02 seconds to deallocate network for instance. 
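Editor's note: the paired 'Lock "compute_resources" acquired ... :: waited N s' and '... "released" ... :: held N s' entries above come from a decorator that times both how long the caller waited for a named lock and how long it held it. The sketch below reproduces that bookkeeping with a plain threading.Lock; it is a simplified stand-in for oslo_concurrency.lockutils, not the library implementation.

```python
import functools
import threading
import time

_locks = {}  # one shared lock object per lock name


def synchronized(name):
    """Log wait/hold times around a named lock (simplified stand-in)."""
    def decorator(fn):
        @functools.wraps(fn)
        def wrapper(*args, **kwargs):
            lock = _locks.setdefault(name, threading.Lock())
            t_wait = time.monotonic()
            with lock:
                waited = time.monotonic() - t_wait
                print('Lock "%s" acquired by "%s" :: waited %.3fs'
                      % (name, fn.__qualname__, waited))
                t_hold = time.monotonic()
                try:
                    return fn(*args, **kwargs)
                finally:
                    held = time.monotonic() - t_hold
                    print('Lock "%s" "released" by "%s" :: held %.3fs'
                          % (name, fn.__qualname__, held))
        return wrapper
    return decorator


@synchronized("compute_resources")
def instance_claim():
    time.sleep(0.01)  # stand-in for the resource tracker's claim work


instance_claim()
```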
[ 695.697608] env[62096]: DEBUG nova.compute.claims [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 695.697782] env[62096]: DEBUG oslo_concurrency.lockutils [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 695.719380] env[62096]: DEBUG nova.compute.utils [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 695.719991] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Expecting reply to msg 6bd9cb8f88c54da39307fb449e8e9ade in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 695.722523] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Expecting reply to msg 3b4513b467814c4ea8bcfc1c444b984f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 695.722991] env[62096]: DEBUG nova.compute.manager [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 695.723158] env[62096]: DEBUG nova.network.neutron [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 695.730520] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6bd9cb8f88c54da39307fb449e8e9ade [ 695.739493] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3b4513b467814c4ea8bcfc1c444b984f [ 695.745611] env[62096]: DEBUG oslo_concurrency.lockutils [None req-70f080d3-692d-46fb-a741-1ba8c3b3362d tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Lock "06b9105a-8dcb-4bff-bba2-05e179036f24" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.557s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 695.745945] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-70f080d3-692d-46fb-a741-1ba8c3b3362d tempest-ServerDiagnosticsV248Test-257505407 tempest-ServerDiagnosticsV248Test-257505407-project-member] Expecting reply to msg 5d2ebd68230d4f23a2f33f74894f9eff in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 695.760579] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5d2ebd68230d4f23a2f33f74894f9eff [ 695.768633] env[62096]: DEBUG nova.policy [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f1206258c91447199b618ada0b11a1c7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3f1d74f0ba244922a51756b6fc7e3e19', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 695.918320] env[62096]: INFO nova.scheduler.client.report [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Deleted allocations for instance 62b778aa-71c7-480b-8148-017773246caf [ 695.924048] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Expecting reply to msg 5c7dcb1786bf40778fed448585819064 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 695.935137] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5c7dcb1786bf40778fed448585819064 [ 696.044188] env[62096]: DEBUG nova.network.neutron [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] Successfully created port: cf058cb6-01d6-407d-b545-720a102be194 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 696.223600] 
env[62096]: DEBUG nova.compute.manager [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] Start building block device mappings for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 696.225503] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Expecting reply to msg dddc91189bb047c09c374754ea9a685e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 696.262220] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dddc91189bb047c09c374754ea9a685e [ 696.428585] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b67d5e24-acb6-4b89-8ad7-369461375b68 tempest-ServerExternalEventsTest-882999065 tempest-ServerExternalEventsTest-882999065-project-member] Lock "62b778aa-71c7-480b-8148-017773246caf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 117.839s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 696.429467] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Expecting reply to msg 98425c19915648c383bc0e1cf89514d7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 696.441826] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 98425c19915648c383bc0e1cf89514d7 [ 696.611382] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2002b28-c8d5-4fcc-9d3a-d283c33772a8 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.619315] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6248725f-31fa-45e0-bf7a-b477f8c38b5b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.651939] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96c9e805-30a6-43da-8154-eb52bac5296a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.659738] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41127fd1-e961-423a-8247-c9de5568ec15 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.673604] env[62096]: DEBUG nova.compute.provider_tree [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 696.674142] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Expecting 
reply to msg 8510ed4de2ca4d9c97af71b4951b78a6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 696.683562] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8510ed4de2ca4d9c97af71b4951b78a6 [ 696.732741] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Expecting reply to msg 8f5d60067d0f46d6915eadf078ff2b58 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 696.769037] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8f5d60067d0f46d6915eadf078ff2b58 [ 696.932393] env[62096]: DEBUG nova.compute.manager [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 696.934027] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Expecting reply to msg ae339469f5144be8a205e8ff82df3e6f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 696.979501] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ae339469f5144be8a205e8ff82df3e6f [ 697.051799] env[62096]: DEBUG nova.compute.manager [req-42f01614-826c-4857-946a-9d7eca8fe87e req-900b9916-f124-44ba-8f37-c33191eb26ef service nova] [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] Received event network-changed-cf058cb6-01d6-407d-b545-720a102be194 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 697.052085] env[62096]: DEBUG nova.compute.manager [req-42f01614-826c-4857-946a-9d7eca8fe87e req-900b9916-f124-44ba-8f37-c33191eb26ef service nova] [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] Refreshing instance network info cache due to event network-changed-cf058cb6-01d6-407d-b545-720a102be194. 
{{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 697.052310] env[62096]: DEBUG oslo_concurrency.lockutils [req-42f01614-826c-4857-946a-9d7eca8fe87e req-900b9916-f124-44ba-8f37-c33191eb26ef service nova] Acquiring lock "refresh_cache-1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 697.052447] env[62096]: DEBUG oslo_concurrency.lockutils [req-42f01614-826c-4857-946a-9d7eca8fe87e req-900b9916-f124-44ba-8f37-c33191eb26ef service nova] Acquired lock "refresh_cache-1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.052600] env[62096]: DEBUG nova.network.neutron [req-42f01614-826c-4857-946a-9d7eca8fe87e req-900b9916-f124-44ba-8f37-c33191eb26ef service nova] [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] Refreshing network info cache for port cf058cb6-01d6-407d-b545-720a102be194 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 697.053004] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-42f01614-826c-4857-946a-9d7eca8fe87e req-900b9916-f124-44ba-8f37-c33191eb26ef service nova] Expecting reply to msg 84d4cdf2efb74b809ab0231f051a0e24 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 697.060414] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 84d4cdf2efb74b809ab0231f051a0e24 [ 697.133573] env[62096]: ERROR nova.compute.manager [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port cf058cb6-01d6-407d-b545-720a102be194, please check neutron logs for more information. 
[ 697.133573] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 697.133573] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 697.133573] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 697.133573] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 697.133573] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 697.133573] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 697.133573] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 697.133573] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 697.133573] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 697.133573] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 697.133573] env[62096]: ERROR nova.compute.manager raise self.value [ 697.133573] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 697.133573] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 697.133573] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 697.133573] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 697.134165] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 697.134165] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 697.134165] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port cf058cb6-01d6-407d-b545-720a102be194, please check neutron logs for more information. 
[ 697.134165] env[62096]: ERROR nova.compute.manager [ 697.134165] env[62096]: Traceback (most recent call last): [ 697.134165] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 697.134165] env[62096]: listener.cb(fileno) [ 697.134165] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 697.134165] env[62096]: result = function(*args, **kwargs) [ 697.134165] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 697.134165] env[62096]: return func(*args, **kwargs) [ 697.134165] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 697.134165] env[62096]: raise e [ 697.134165] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 697.134165] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 697.134165] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 697.134165] env[62096]: created_port_ids = self._update_ports_for_instance( [ 697.134165] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 697.134165] env[62096]: with excutils.save_and_reraise_exception(): [ 697.134165] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 697.134165] env[62096]: self.force_reraise() [ 697.134165] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 697.134165] env[62096]: raise self.value [ 697.134165] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 697.134165] env[62096]: updated_port = self._update_port( [ 697.134165] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 697.134165] env[62096]: _ensure_no_port_binding_failure(port) [ 697.134165] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 697.134165] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 697.135005] env[62096]: nova.exception.PortBindingFailed: Binding failed for port cf058cb6-01d6-407d-b545-720a102be194, please check neutron logs for more information. 
[ 697.135005] env[62096]: Removing descriptor: 14 [ 697.176764] env[62096]: DEBUG nova.scheduler.client.report [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 697.180045] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Expecting reply to msg c56cdd9c820c41ecbb7d20b516089d72 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 697.207840] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c56cdd9c820c41ecbb7d20b516089d72 [ 697.238226] env[62096]: DEBUG nova.compute.manager [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] Start spawning the instance on the hypervisor. {{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 697.261654] env[62096]: DEBUG nova.virt.hardware [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 697.262228] env[62096]: DEBUG nova.virt.hardware [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 697.262579] env[62096]: DEBUG nova.virt.hardware [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 697.262974] env[62096]: DEBUG nova.virt.hardware [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 
tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 697.263233] env[62096]: DEBUG nova.virt.hardware [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 697.263481] env[62096]: DEBUG nova.virt.hardware [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 697.263788] env[62096]: DEBUG nova.virt.hardware [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 697.264094] env[62096]: DEBUG nova.virt.hardware [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 697.264369] env[62096]: DEBUG nova.virt.hardware [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 697.264634] env[62096]: DEBUG nova.virt.hardware [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 697.264931] env[62096]: DEBUG nova.virt.hardware [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 697.265888] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2372198d-f4a5-4ce5-b77e-b7b33c110414 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.274643] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dccb47e0-9f89-45de-85ab-a450ce1e4551 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.299715] env[62096]: ERROR nova.compute.manager [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] Instance 
failed to spawn: nova.exception.PortBindingFailed: Binding failed for port cf058cb6-01d6-407d-b545-720a102be194, please check neutron logs for more information. [ 697.299715] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] Traceback (most recent call last): [ 697.299715] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 697.299715] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] yield resources [ 697.299715] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 697.299715] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] self.driver.spawn(context, instance, image_meta, [ 697.299715] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 697.299715] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 697.299715] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 697.299715] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] vm_ref = self.build_virtual_machine(instance, [ 697.299715] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 697.300133] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] vif_infos = vmwarevif.get_vif_info(self._session, [ 697.300133] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 697.300133] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] for vif in network_info: [ 697.300133] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 697.300133] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] return self._sync_wrapper(fn, *args, **kwargs) [ 697.300133] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 697.300133] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] self.wait() [ 697.300133] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 697.300133] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] self[:] = self._gt.wait() [ 697.300133] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 697.300133] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] return self._exit_event.wait() [ 697.300133] env[62096]: ERROR nova.compute.manager [instance: 
1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 697.300133] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] current.throw(*self._exc) [ 697.300581] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 697.300581] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] result = function(*args, **kwargs) [ 697.300581] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 697.300581] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] return func(*args, **kwargs) [ 697.300581] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 697.300581] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] raise e [ 697.300581] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 697.300581] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] nwinfo = self.network_api.allocate_for_instance( [ 697.300581] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 697.300581] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] created_port_ids = self._update_ports_for_instance( [ 697.300581] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 697.300581] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] with excutils.save_and_reraise_exception(): [ 697.300581] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 697.300993] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] self.force_reraise() [ 697.300993] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 697.300993] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] raise self.value [ 697.300993] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 697.300993] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] updated_port = self._update_port( [ 697.300993] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 697.300993] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] _ensure_no_port_binding_failure(port) [ 697.300993] env[62096]: ERROR 
nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 697.300993] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] raise exception.PortBindingFailed(port_id=port['id']) [ 697.300993] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] nova.exception.PortBindingFailed: Binding failed for port cf058cb6-01d6-407d-b545-720a102be194, please check neutron logs for more information. [ 697.300993] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] [ 697.300993] env[62096]: INFO nova.compute.manager [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] Terminating instance [ 697.302230] env[62096]: DEBUG oslo_concurrency.lockutils [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Acquiring lock "refresh_cache-1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 697.457399] env[62096]: DEBUG oslo_concurrency.lockutils [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 697.572471] env[62096]: DEBUG nova.network.neutron [req-42f01614-826c-4857-946a-9d7eca8fe87e req-900b9916-f124-44ba-8f37-c33191eb26ef service nova] [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 697.683042] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.471s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 697.683656] env[62096]: DEBUG nova.compute.manager [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 697.685467] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Expecting reply to msg 9f69db97ef01475999f4650d7ba0a6af in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 697.686673] env[62096]: DEBUG oslo_concurrency.lockutils [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.940s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 697.688328] env[62096]: INFO nova.compute.claims [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 697.690022] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Expecting reply to msg a42f868b68e54ff19e91893e1cca47f0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 697.728830] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f69db97ef01475999f4650d7ba0a6af [ 697.744814] env[62096]: DEBUG nova.network.neutron [req-42f01614-826c-4857-946a-9d7eca8fe87e req-900b9916-f124-44ba-8f37-c33191eb26ef service nova] [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 697.745374] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-42f01614-826c-4857-946a-9d7eca8fe87e req-900b9916-f124-44ba-8f37-c33191eb26ef service nova] Expecting reply to msg ed5d034d4b4849ce977b0ed38a92c71e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 697.746998] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a42f868b68e54ff19e91893e1cca47f0 [ 697.758809] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ed5d034d4b4849ce977b0ed38a92c71e [ 698.194886] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Expecting reply to msg 98497fb3316b42eaac06e2e3a1466f2c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 698.197283] env[62096]: DEBUG nova.compute.utils [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 698.199115] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Expecting reply to msg 06caf99c377940c38de3d074fac41352 in queue 
reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 698.200770] env[62096]: DEBUG nova.compute.manager [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 698.201073] env[62096]: DEBUG nova.network.neutron [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 698.205164] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 98497fb3316b42eaac06e2e3a1466f2c [ 698.211013] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 06caf99c377940c38de3d074fac41352 [ 698.249182] env[62096]: DEBUG oslo_concurrency.lockutils [req-42f01614-826c-4857-946a-9d7eca8fe87e req-900b9916-f124-44ba-8f37-c33191eb26ef service nova] Releasing lock "refresh_cache-1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 698.249983] env[62096]: DEBUG oslo_concurrency.lockutils [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Acquired lock "refresh_cache-1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 698.250225] env[62096]: DEBUG nova.network.neutron [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 698.250692] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Expecting reply to msg 4485e4aae7984459ab4d8bb25c1787f5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 698.257080] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4485e4aae7984459ab4d8bb25c1787f5 [ 698.299020] env[62096]: DEBUG nova.policy [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ef3aee95fca2495381ad1bcad2b43c75', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd89816f1a0934530beec5a5022c37338', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 698.588145] env[62096]: DEBUG nova.network.neutron [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 
tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] Successfully created port: 2d7f49b1-591c-4fbb-8ac2-6e9c7b079525 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 698.704497] env[62096]: DEBUG nova.compute.manager [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] Start building block device mappings for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 698.706202] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Expecting reply to msg b4d7e2f404ae42f88f3dfbabb1ba77d6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 698.752819] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4d7e2f404ae42f88f3dfbabb1ba77d6 [ 698.791927] env[62096]: DEBUG nova.network.neutron [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 699.124918] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e2fa76d-6167-44ed-bc21-b32d4276e330 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.134422] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eb55c86-32f5-44af-b66b-9ae3cd6802c6 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.167471] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35a2835d-3aba-4431-b309-6e487b90443a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.174927] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a70b2c86-4aef-4fd1-a205-d0ffd2803b54 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.189992] env[62096]: DEBUG nova.compute.provider_tree [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 699.190506] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Expecting reply to msg dfccae796f9b4588b35ecb7e5e1e3a82 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 699.194963] env[62096]: DEBUG nova.network.neutron [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 
tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 699.195057] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Expecting reply to msg 494a81ac05ca4d6f9a61307231c81e9b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 699.203474] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dfccae796f9b4588b35ecb7e5e1e3a82 [ 699.204227] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 494a81ac05ca4d6f9a61307231c81e9b [ 699.212525] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Expecting reply to msg f1988871fd7048fa9080881783dc6d51 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 699.264848] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f1988871fd7048fa9080881783dc6d51 [ 699.432527] env[62096]: DEBUG nova.compute.manager [req-2aa532a2-415a-42c6-93df-555ab3feaf0e req-88b80395-318a-4ddb-8703-361bf0028adc service nova] [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] Received event network-vif-deleted-cf058cb6-01d6-407d-b545-720a102be194 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 699.569207] env[62096]: ERROR nova.compute.manager [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 2d7f49b1-591c-4fbb-8ac2-6e9c7b079525, please check neutron logs for more information. 
[ 699.569207] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 699.569207] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 699.569207] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 699.569207] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 699.569207] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 699.569207] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 699.569207] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 699.569207] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 699.569207] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 699.569207] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 699.569207] env[62096]: ERROR nova.compute.manager raise self.value [ 699.569207] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 699.569207] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 699.569207] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 699.569207] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 699.569655] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 699.569655] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 699.569655] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 2d7f49b1-591c-4fbb-8ac2-6e9c7b079525, please check neutron logs for more information. 
[ 699.569655] env[62096]: ERROR nova.compute.manager [ 699.569655] env[62096]: Traceback (most recent call last): [ 699.569655] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 699.569655] env[62096]: listener.cb(fileno) [ 699.569655] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 699.569655] env[62096]: result = function(*args, **kwargs) [ 699.569655] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 699.569655] env[62096]: return func(*args, **kwargs) [ 699.569655] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 699.569655] env[62096]: raise e [ 699.569655] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 699.569655] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 699.569655] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 699.569655] env[62096]: created_port_ids = self._update_ports_for_instance( [ 699.569655] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 699.569655] env[62096]: with excutils.save_and_reraise_exception(): [ 699.569655] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 699.569655] env[62096]: self.force_reraise() [ 699.569655] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 699.569655] env[62096]: raise self.value [ 699.569655] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 699.569655] env[62096]: updated_port = self._update_port( [ 699.569655] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 699.569655] env[62096]: _ensure_no_port_binding_failure(port) [ 699.569655] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 699.569655] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 699.570521] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 2d7f49b1-591c-4fbb-8ac2-6e9c7b079525, please check neutron logs for more information. 
[ 699.570521] env[62096]: Removing descriptor: 14 [ 699.692994] env[62096]: DEBUG nova.scheduler.client.report [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 699.695323] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Expecting reply to msg 085be91e31bd4054910c2804a0e7cf30 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 699.700219] env[62096]: DEBUG oslo_concurrency.lockutils [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Releasing lock "refresh_cache-1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 699.700626] env[62096]: DEBUG nova.compute.manager [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 699.701516] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 699.701516] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-344cc322-64cf-4988-9a0d-383cf305bae4 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.709793] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-017f3221-5f1a-48e5-a045-f57c054a1c59 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.724162] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 085be91e31bd4054910c2804a0e7cf30 [ 699.725907] env[62096]: DEBUG nova.compute.manager [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 699.740725] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9 could not be found. [ 699.740960] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 699.741349] env[62096]: INFO nova.compute.manager [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] Took 0.04 seconds to destroy the instance on the hypervisor. [ 699.741416] env[62096]: DEBUG oslo.service.loopingcall [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 699.741593] env[62096]: DEBUG nova.compute.manager [-] [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 699.741752] env[62096]: DEBUG nova.network.neutron [-] [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 699.750936] env[62096]: DEBUG nova.virt.hardware [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=<?>,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-08-19T11:51:56Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 699.751158] env[62096]: DEBUG nova.virt.hardware [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 699.751314] env[62096]: DEBUG 
nova.virt.hardware [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 699.751495] env[62096]: DEBUG nova.virt.hardware [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 699.751640] env[62096]: DEBUG nova.virt.hardware [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 699.751875] env[62096]: DEBUG nova.virt.hardware [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 699.752031] env[62096]: DEBUG nova.virt.hardware [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 699.752201] env[62096]: DEBUG nova.virt.hardware [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 699.752370] env[62096]: DEBUG nova.virt.hardware [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 699.752529] env[62096]: DEBUG nova.virt.hardware [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 699.752696] env[62096]: DEBUG nova.virt.hardware [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 699.753609] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0a97995-0724-4cb3-b970-30013a60f8d4 {{(pid=62096) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.757380] env[62096]: DEBUG nova.network.neutron [-] [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 699.758225] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 9c88bea7b5754bfbbf3c05eaaf732d48 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 699.762762] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44944cc9-ffdb-43c9-ac2c-512ef6c5986a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.767741] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9c88bea7b5754bfbbf3c05eaaf732d48 [ 699.779778] env[62096]: ERROR nova.compute.manager [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 2d7f49b1-591c-4fbb-8ac2-6e9c7b079525, please check neutron logs for more information. [ 699.779778] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] Traceback (most recent call last): [ 699.779778] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 699.779778] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] yield resources [ 699.779778] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 699.779778] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] self.driver.spawn(context, instance, image_meta, [ 699.779778] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 699.779778] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 699.779778] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 699.779778] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] vm_ref = self.build_virtual_machine(instance, [ 699.779778] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 699.780254] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] vif_infos = vmwarevif.get_vif_info(self._session, [ 699.780254] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 699.780254] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] for vif in network_info: [ 699.780254] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] File 
"/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 699.780254] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] return self._sync_wrapper(fn, *args, **kwargs) [ 699.780254] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 699.780254] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] self.wait() [ 699.780254] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 699.780254] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] self[:] = self._gt.wait() [ 699.780254] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 699.780254] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] return self._exit_event.wait() [ 699.780254] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 699.780254] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] current.throw(*self._exc) [ 699.780939] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 699.780939] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] result = function(*args, **kwargs) [ 699.780939] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 699.780939] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] return func(*args, **kwargs) [ 699.780939] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 699.780939] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] raise e [ 699.780939] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 699.780939] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] nwinfo = self.network_api.allocate_for_instance( [ 699.780939] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 699.780939] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] created_port_ids = self._update_ports_for_instance( [ 699.780939] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 699.780939] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] with excutils.save_and_reraise_exception(): [ 699.780939] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 699.781494] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] self.force_reraise() [ 699.781494] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 699.781494] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] raise self.value [ 699.781494] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 699.781494] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] updated_port = self._update_port( [ 699.781494] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 699.781494] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] _ensure_no_port_binding_failure(port) [ 699.781494] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 699.781494] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] raise exception.PortBindingFailed(port_id=port['id']) [ 699.781494] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] nova.exception.PortBindingFailed: Binding failed for port 2d7f49b1-591c-4fbb-8ac2-6e9c7b079525, please check neutron logs for more information. 
[ 699.781494] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] [ 699.781494] env[62096]: INFO nova.compute.manager [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] Terminating instance [ 699.784679] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Acquiring lock "refresh_cache-0d89651a-8d34-428a-a9e7-d56ce9d3c5b4" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 699.784875] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Acquired lock "refresh_cache-0d89651a-8d34-428a-a9e7-d56ce9d3c5b4" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 699.785068] env[62096]: DEBUG nova.network.neutron [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 699.785499] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Expecting reply to msg 5ef48f80e23645039259f6c8b9bdd016 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 699.792992] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ef48f80e23645039259f6c8b9bdd016 [ 700.202776] env[62096]: DEBUG oslo_concurrency.lockutils [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.516s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 700.203323] env[62096]: DEBUG nova.compute.manager [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 700.205117] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Expecting reply to msg 7cd63b0a5cb949c5892875a6d260f743 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 700.206157] env[62096]: DEBUG oslo_concurrency.lockutils [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 28.559s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 700.214296] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Expecting reply to msg 37d9f57b2ee44b918e78c38935980235 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 700.251608] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 37d9f57b2ee44b918e78c38935980235 [ 700.251608] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7cd63b0a5cb949c5892875a6d260f743 [ 700.262506] env[62096]: DEBUG nova.network.neutron [-] [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 700.262955] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg bd5469fe4f364e6cbe75797cba43df72 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 700.271049] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bd5469fe4f364e6cbe75797cba43df72 [ 700.307759] env[62096]: DEBUG nova.network.neutron [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 700.422005] env[62096]: DEBUG nova.network.neutron [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 700.422954] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Expecting reply to msg 53976c3aa14d464bb151f0f2301fcd5d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 700.431430] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 53976c3aa14d464bb151f0f2301fcd5d [ 700.710507] env[62096]: DEBUG nova.compute.utils [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 700.711139] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Expecting reply to msg abcf2479208f41a49b44f903d77290ad in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 700.715577] env[62096]: DEBUG nova.compute.manager [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 700.715577] env[62096]: DEBUG nova.network.neutron [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 700.728717] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg abcf2479208f41a49b44f903d77290ad [ 700.762658] env[62096]: DEBUG nova.policy [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f1206258c91447199b618ada0b11a1c7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3f1d74f0ba244922a51756b6fc7e3e19', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 700.764628] env[62096]: INFO nova.compute.manager [-] [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] Took 1.02 seconds to deallocate network for instance. 
[ 700.767069] env[62096]: DEBUG nova.compute.claims [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 700.767326] env[62096]: DEBUG oslo_concurrency.lockutils [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 700.924656] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Releasing lock "refresh_cache-0d89651a-8d34-428a-a9e7-d56ce9d3c5b4" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 700.925145] env[62096]: DEBUG nova.compute.manager [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 700.925382] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 700.925692] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6a51ea42-d806-40dd-9bac-33fc8e8d4a31 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.940717] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94619373-0574-440f-ae18-5eb62a66cc45 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.972271] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4 could not be found. 
[ 700.972506] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 700.972683] env[62096]: INFO nova.compute.manager [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] Took 0.05 seconds to destroy the instance on the hypervisor. [ 700.972923] env[62096]: DEBUG oslo.service.loopingcall [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 700.975553] env[62096]: DEBUG nova.compute.manager [-] [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 700.975645] env[62096]: DEBUG nova.network.neutron [-] [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 701.005113] env[62096]: DEBUG nova.network.neutron [-] [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 701.005624] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 708b8b62b0834b189725f75481c3f22e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 701.023051] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 708b8b62b0834b189725f75481c3f22e [ 701.037915] env[62096]: DEBUG nova.network.neutron [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] Successfully created port: d00a01dc-5cbe-464a-939f-07ed16a2758d {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 701.175949] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5d06a4f-24d9-4949-9d40-8b137b4afe16 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.184243] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c928e8fd-0c7a-43b0-a337-0d3720a0ac84 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.214638] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29d7d7f0-fcad-40df-ba9e-c20defbdf172 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.217623] env[62096]: DEBUG nova.compute.manager [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 
tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] Start building block device mappings for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 701.219706] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Expecting reply to msg 9f116989f0a647f49cf5c20f70140930 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 701.225397] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ab2432f-4c46-4abe-b885-2becdd078585 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.239530] env[62096]: DEBUG nova.compute.provider_tree [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 701.240055] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Expecting reply to msg adc3217fc4244d67b92295ef9448ccfa in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 701.248749] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg adc3217fc4244d67b92295ef9448ccfa [ 701.286036] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f116989f0a647f49cf5c20f70140930 [ 701.508551] env[62096]: DEBUG nova.network.neutron [-] [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 701.509028] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 781ea5b329a94541b1d3f0e9df46eb02 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 701.518209] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 781ea5b329a94541b1d3f0e9df46eb02 [ 701.545101] env[62096]: DEBUG nova.compute.manager [req-762ca3fd-5706-422d-8be1-085aedb79f87 req-31a19fb7-1155-49d5-9c0f-f04273b3f6d9 service nova] [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] Received event network-changed-2d7f49b1-591c-4fbb-8ac2-6e9c7b079525 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 701.545306] env[62096]: DEBUG nova.compute.manager [req-762ca3fd-5706-422d-8be1-085aedb79f87 req-31a19fb7-1155-49d5-9c0f-f04273b3f6d9 service nova] [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] Refreshing instance network info cache due to event network-changed-2d7f49b1-591c-4fbb-8ac2-6e9c7b079525. 
{{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 701.545652] env[62096]: DEBUG oslo_concurrency.lockutils [req-762ca3fd-5706-422d-8be1-085aedb79f87 req-31a19fb7-1155-49d5-9c0f-f04273b3f6d9 service nova] Acquiring lock "refresh_cache-0d89651a-8d34-428a-a9e7-d56ce9d3c5b4" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 701.545652] env[62096]: DEBUG oslo_concurrency.lockutils [req-762ca3fd-5706-422d-8be1-085aedb79f87 req-31a19fb7-1155-49d5-9c0f-f04273b3f6d9 service nova] Acquired lock "refresh_cache-0d89651a-8d34-428a-a9e7-d56ce9d3c5b4" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 701.546143] env[62096]: DEBUG nova.network.neutron [req-762ca3fd-5706-422d-8be1-085aedb79f87 req-31a19fb7-1155-49d5-9c0f-f04273b3f6d9 service nova] [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] Refreshing network info cache for port 2d7f49b1-591c-4fbb-8ac2-6e9c7b079525 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 701.546655] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-762ca3fd-5706-422d-8be1-085aedb79f87 req-31a19fb7-1155-49d5-9c0f-f04273b3f6d9 service nova] Expecting reply to msg 0adad2980f1d4efaa2475772cfe931c0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 701.552583] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0adad2980f1d4efaa2475772cfe931c0 [ 701.724370] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Expecting reply to msg fc55fff30ac64177adece7ff9b2043e2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 701.742805] env[62096]: DEBUG nova.scheduler.client.report [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 701.745297] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Expecting reply to msg 9d0e617e97b44a638eb98d8e654faa15 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 701.762958] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fc55fff30ac64177adece7ff9b2043e2 [ 701.776296] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9d0e617e97b44a638eb98d8e654faa15 [ 701.871728] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Acquiring lock "7a85b800-725c-4d91-90bd-2056eb2fb116" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 701.872047] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Lock "7a85b800-725c-4d91-90bd-2056eb2fb116" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 701.974059] env[62096]: ERROR nova.compute.manager [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d00a01dc-5cbe-464a-939f-07ed16a2758d, please check neutron logs for more information. [ 701.974059] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 701.974059] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 701.974059] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 701.974059] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 701.974059] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 701.974059] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 701.974059] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 701.974059] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 701.974059] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 701.974059] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 701.974059] env[62096]: ERROR nova.compute.manager raise self.value [ 701.974059] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 701.974059] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 701.974059] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 701.974059] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 701.974522] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 701.974522] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 701.974522] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port d00a01dc-5cbe-464a-939f-07ed16a2758d, please check neutron logs for more information. 
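The traceback above bottoms out in `_ensure_no_port_binding_failure` (nova/network/neutron.py, line 294 in this build), which converts a rejected Neutron port binding into `nova.exception.PortBindingFailed`. The snippet below is a minimal, self-contained sketch of that kind of check, not Nova's actual module: it assumes Neutron reports the binding outcome in the port's `binding:vif_type` attribute and that the sentinel value `binding_failed` marks a port the backend could not wire up.

```python
# Hypothetical, stand-alone sketch of a port-binding sanity check.
# Assumption: Neutron exposes the binding result via 'binding:vif_type'
# and uses the sentinel 'binding_failed' when the port could not be bound.

VIF_TYPE_BINDING_FAILED = 'binding_failed'


class PortBindingFailed(Exception):
    """Raised when a Neutron port could not be bound to the compute host."""

    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")
        self.port_id = port_id


def ensure_no_port_binding_failure(port):
    """Raise PortBindingFailed if the port's binding was rejected."""
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])


if __name__ == '__main__':
    bad_port = {'id': 'd00a01dc-5cbe-464a-939f-07ed16a2758d',
                'binding:vif_type': VIF_TYPE_BINDING_FAILED}
    try:
        ensure_no_port_binding_failure(bad_port)
    except PortBindingFailed as exc:
        print(exc)   # mirrors the message recorded in the log above
```

In the run captured here that check fires for port d00a01dc-5cbe-464a-939f-07ed16a2758d, which is why the build is aborted and the network allocation for the instance is torn down.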
[ 701.974522] env[62096]: ERROR nova.compute.manager [ 701.974522] env[62096]: Traceback (most recent call last): [ 701.974522] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 701.974522] env[62096]: listener.cb(fileno) [ 701.974522] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 701.974522] env[62096]: result = function(*args, **kwargs) [ 701.974522] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 701.974522] env[62096]: return func(*args, **kwargs) [ 701.974522] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 701.974522] env[62096]: raise e [ 701.974522] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 701.974522] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 701.974522] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 701.974522] env[62096]: created_port_ids = self._update_ports_for_instance( [ 701.974522] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 701.974522] env[62096]: with excutils.save_and_reraise_exception(): [ 701.974522] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 701.974522] env[62096]: self.force_reraise() [ 701.974522] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 701.974522] env[62096]: raise self.value [ 701.974522] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 701.974522] env[62096]: updated_port = self._update_port( [ 701.974522] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 701.974522] env[62096]: _ensure_no_port_binding_failure(port) [ 701.974522] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 701.974522] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 701.975328] env[62096]: nova.exception.PortBindingFailed: Binding failed for port d00a01dc-5cbe-464a-939f-07ed16a2758d, please check neutron logs for more information. [ 701.975328] env[62096]: Removing descriptor: 14 [ 702.011437] env[62096]: INFO nova.compute.manager [-] [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] Took 1.04 seconds to deallocate network for instance. 
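Both traceback dumps pass through `excutils.save_and_reraise_exception()`: the frames show `_update_ports_for_instance` entering the context manager and its `__exit__` calling `force_reraise()` / `raise self.value`, so the caller sees the original `PortBindingFailed` even though cleanup ran in between. The sketch below is a stripped-down stand-in for that pattern, written from the behaviour visible in the frames rather than from the real oslo_utils class; the `created_ports` list and port ID are illustrative only.

```python
# Simplified analogue of the save-and-reraise pattern seen in the tracebacks
# above; illustrative only, not the oslo_utils implementation.
import sys


class save_and_reraise:
    """Remember the exception being handled, run cleanup, then re-raise it."""

    def __enter__(self):
        # Capture the exception the surrounding `except` block is handling.
        self.value = sys.exc_info()[1]
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if exc_type is None:
            # Cleanup succeeded: push the original error back to the caller.
            self.force_reraise()
        # If cleanup itself raised, let that exception propagate instead.
        return False

    def force_reraise(self):
        raise self.value


created_ports = ['71f35985-e6b7-42f9-b1d6-19ed301c27b5']   # illustrative ID


def update_ports_for_instance():
    try:
        raise RuntimeError("Binding failed")   # stands in for PortBindingFailed
    except Exception:
        with save_and_reraise():
            # Cleanup runs here (e.g. releasing half-created ports) while the
            # original exception is kept and re-raised afterwards.
            created_ports.clear()


if __name__ == '__main__':
    try:
        update_ports_for_instance()
    except RuntimeError as exc:
        print("caller still sees:", exc)        # -> Binding failed
```

That re-raise is what lets the compute manager log the failure once per instance and move on to deallocating the network, as the surrounding entries show.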
[ 702.013942] env[62096]: DEBUG nova.compute.claims [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 702.014146] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 702.062554] env[62096]: DEBUG nova.network.neutron [req-762ca3fd-5706-422d-8be1-085aedb79f87 req-31a19fb7-1155-49d5-9c0f-f04273b3f6d9 service nova] [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 702.135009] env[62096]: DEBUG nova.network.neutron [req-762ca3fd-5706-422d-8be1-085aedb79f87 req-31a19fb7-1155-49d5-9c0f-f04273b3f6d9 service nova] [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 702.135542] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-762ca3fd-5706-422d-8be1-085aedb79f87 req-31a19fb7-1155-49d5-9c0f-f04273b3f6d9 service nova] Expecting reply to msg d664ed8c4e52470ab4b7f0f7cc7e801e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 702.144905] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d664ed8c4e52470ab4b7f0f7cc7e801e [ 702.228055] env[62096]: DEBUG nova.compute.manager [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] Start spawning the instance on the hypervisor. {{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 702.251928] env[62096]: DEBUG oslo_concurrency.lockutils [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.046s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 702.252747] env[62096]: ERROR nova.compute.manager [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 71f35985-e6b7-42f9-b1d6-19ed301c27b5, please check neutron logs for more information. 
[ 702.252747] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] Traceback (most recent call last): [ 702.252747] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 702.252747] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] self.driver.spawn(context, instance, image_meta, [ 702.252747] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 702.252747] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] self._vmops.spawn(context, instance, image_meta, injected_files, [ 702.252747] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 702.252747] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] vm_ref = self.build_virtual_machine(instance, [ 702.252747] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 702.252747] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] vif_infos = vmwarevif.get_vif_info(self._session, [ 702.252747] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 702.253157] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] for vif in network_info: [ 702.253157] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 702.253157] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] return self._sync_wrapper(fn, *args, **kwargs) [ 702.253157] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 702.253157] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] self.wait() [ 702.253157] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 702.253157] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] self[:] = self._gt.wait() [ 702.253157] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 702.253157] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] return self._exit_event.wait() [ 702.253157] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 702.253157] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] result = hub.switch() [ 702.253157] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
702.253157] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] return self.greenlet.switch() [ 702.253621] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 702.253621] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] result = function(*args, **kwargs) [ 702.253621] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 702.253621] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] return func(*args, **kwargs) [ 702.253621] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 702.253621] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] raise e [ 702.253621] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 702.253621] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] nwinfo = self.network_api.allocate_for_instance( [ 702.253621] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 702.253621] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] created_port_ids = self._update_ports_for_instance( [ 702.253621] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 702.253621] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] with excutils.save_and_reraise_exception(): [ 702.253621] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 702.254018] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] self.force_reraise() [ 702.254018] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 702.254018] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] raise self.value [ 702.254018] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 702.254018] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] updated_port = self._update_port( [ 702.254018] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 702.254018] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] _ensure_no_port_binding_failure(port) [ 702.254018] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 702.254018] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] raise exception.PortBindingFailed(port_id=port['id']) [ 702.254018] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] nova.exception.PortBindingFailed: Binding failed for port 71f35985-e6b7-42f9-b1d6-19ed301c27b5, please check neutron logs for more information. [ 702.254018] env[62096]: ERROR nova.compute.manager [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] [ 702.254303] env[62096]: DEBUG nova.compute.utils [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] Binding failed for port 71f35985-e6b7-42f9-b1d6-19ed301c27b5, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 702.258398] env[62096]: DEBUG nova.virt.hardware [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 702.258398] env[62096]: DEBUG nova.virt.hardware [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 702.258398] env[62096]: DEBUG nova.virt.hardware [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 702.258545] env[62096]: DEBUG nova.virt.hardware [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 702.258545] env[62096]: DEBUG nova.virt.hardware [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 702.258545] env[62096]: DEBUG nova.virt.hardware [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Chose 
sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 702.258545] env[62096]: DEBUG nova.virt.hardware [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 702.258665] env[62096]: DEBUG nova.virt.hardware [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 702.259593] env[62096]: DEBUG nova.virt.hardware [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 702.259593] env[62096]: DEBUG nova.virt.hardware [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 702.259593] env[62096]: DEBUG nova.virt.hardware [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 702.259593] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 29.412s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 702.261182] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Expecting reply to msg 3066db01f8524e85bbe1762daa20e634 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 702.262792] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d397bd4-42e3-48df-a10b-d0da7d647e09 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.265992] env[62096]: DEBUG nova.compute.manager [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] Build of instance e5c75298-b9ef-4e28-a038-b55d8e198539 was re-scheduled: Binding failed for port 71f35985-e6b7-42f9-b1d6-19ed301c27b5, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 702.266433] env[62096]: DEBUG nova.compute.manager [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 702.266646] env[62096]: DEBUG oslo_concurrency.lockutils [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Acquiring lock "refresh_cache-e5c75298-b9ef-4e28-a038-b55d8e198539" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 702.266783] env[62096]: DEBUG oslo_concurrency.lockutils [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Acquired lock "refresh_cache-e5c75298-b9ef-4e28-a038-b55d8e198539" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 702.266942] env[62096]: DEBUG nova.network.neutron [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 702.267323] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Expecting reply to msg d91b070d85aa40d8bae75ba6c127ddca in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 702.273628] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ae7cbb5-2da9-476c-9c39-57371fbd12e3 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.277632] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d91b070d85aa40d8bae75ba6c127ddca [ 702.287929] env[62096]: ERROR nova.compute.manager [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port d00a01dc-5cbe-464a-939f-07ed16a2758d, please check neutron logs for more information. 
[ 702.287929] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] Traceback (most recent call last): [ 702.287929] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 702.287929] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] yield resources [ 702.287929] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 702.287929] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] self.driver.spawn(context, instance, image_meta, [ 702.287929] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 702.287929] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 702.287929] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 702.287929] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] vm_ref = self.build_virtual_machine(instance, [ 702.287929] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 702.288391] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] vif_infos = vmwarevif.get_vif_info(self._session, [ 702.288391] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 702.288391] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] for vif in network_info: [ 702.288391] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 702.288391] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] return self._sync_wrapper(fn, *args, **kwargs) [ 702.288391] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 702.288391] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] self.wait() [ 702.288391] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 702.288391] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] self[:] = self._gt.wait() [ 702.288391] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 702.288391] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] return self._exit_event.wait() [ 702.288391] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 702.288391] env[62096]: ERROR 
nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] current.throw(*self._exc) [ 702.288812] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 702.288812] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] result = function(*args, **kwargs) [ 702.288812] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 702.288812] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] return func(*args, **kwargs) [ 702.288812] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 702.288812] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] raise e [ 702.288812] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 702.288812] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] nwinfo = self.network_api.allocate_for_instance( [ 702.288812] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 702.288812] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] created_port_ids = self._update_ports_for_instance( [ 702.288812] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 702.288812] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] with excutils.save_and_reraise_exception(): [ 702.288812] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 702.289226] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] self.force_reraise() [ 702.289226] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 702.289226] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] raise self.value [ 702.289226] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 702.289226] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] updated_port = self._update_port( [ 702.289226] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 702.289226] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] _ensure_no_port_binding_failure(port) [ 702.289226] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
702.289226] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] raise exception.PortBindingFailed(port_id=port['id']) [ 702.289226] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] nova.exception.PortBindingFailed: Binding failed for port d00a01dc-5cbe-464a-939f-07ed16a2758d, please check neutron logs for more information. [ 702.289226] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] [ 702.289226] env[62096]: INFO nova.compute.manager [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] Terminating instance [ 702.290092] env[62096]: DEBUG oslo_concurrency.lockutils [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Acquiring lock "refresh_cache-ac8746f4-95b0-440e-bc3e-a92457ed664f" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 702.290246] env[62096]: DEBUG oslo_concurrency.lockutils [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Acquired lock "refresh_cache-ac8746f4-95b0-440e-bc3e-a92457ed664f" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 702.290410] env[62096]: DEBUG nova.network.neutron [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 702.290808] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Expecting reply to msg 4fa7033b5031460e9248d64f508947fa in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 702.298915] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4fa7033b5031460e9248d64f508947fa [ 702.304671] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3066db01f8524e85bbe1762daa20e634 [ 702.638182] env[62096]: DEBUG oslo_concurrency.lockutils [req-762ca3fd-5706-422d-8be1-085aedb79f87 req-31a19fb7-1155-49d5-9c0f-f04273b3f6d9 service nova] Releasing lock "refresh_cache-0d89651a-8d34-428a-a9e7-d56ce9d3c5b4" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 702.638460] env[62096]: DEBUG nova.compute.manager [req-762ca3fd-5706-422d-8be1-085aedb79f87 req-31a19fb7-1155-49d5-9c0f-f04273b3f6d9 service nova] [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] Received event network-vif-deleted-2d7f49b1-591c-4fbb-8ac2-6e9c7b079525 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 702.788260] env[62096]: DEBUG nova.network.neutron [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 702.816188] env[62096]: DEBUG nova.network.neutron [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 702.868377] env[62096]: DEBUG nova.network.neutron [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 702.869026] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Expecting reply to msg 586de35cfc1a4022942a4fb374b0f521 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 702.882728] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 586de35cfc1a4022942a4fb374b0f521 [ 702.951544] env[62096]: DEBUG nova.network.neutron [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 702.952092] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Expecting reply to msg 751e89aeb70448c68abdb9635bbced58 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 702.960344] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 751e89aeb70448c68abdb9635bbced58 [ 703.114535] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5fd0d75-724a-4134-8a38-561d5fa6bf2f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.122166] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d46ffa3-a40d-4be3-b2f9-4f4633af0bdb {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.152072] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3c6a321-ea0b-402c-b8ca-23ed327f5bd9 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.158798] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d67a349-4476-4a1b-b296-d1dc3db852ca {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.171278] env[62096]: DEBUG nova.compute.provider_tree [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 
{{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 703.171768] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Expecting reply to msg b216849286174f1c8098e214b0221ba9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 703.178416] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b216849286174f1c8098e214b0221ba9 [ 703.371199] env[62096]: DEBUG oslo_concurrency.lockutils [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Releasing lock "refresh_cache-e5c75298-b9ef-4e28-a038-b55d8e198539" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 703.371703] env[62096]: DEBUG nova.compute.manager [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 703.372107] env[62096]: DEBUG nova.compute.manager [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 703.372401] env[62096]: DEBUG nova.network.neutron [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 703.388031] env[62096]: DEBUG nova.network.neutron [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 703.388719] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Expecting reply to msg e2f1c8420337441291edd4845670d2aa in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 703.399998] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e2f1c8420337441291edd4845670d2aa [ 703.455117] env[62096]: DEBUG oslo_concurrency.lockutils [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Releasing lock "refresh_cache-ac8746f4-95b0-440e-bc3e-a92457ed664f" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 703.455631] env[62096]: DEBUG nova.compute.manager [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 703.455844] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 703.456186] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0863231c-75da-474e-85b1-302a705b5276 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.464930] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a2f0d5d-7779-4925-bb14-602291401215 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.488546] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ac8746f4-95b0-440e-bc3e-a92457ed664f could not be found. [ 703.488822] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 703.489028] env[62096]: INFO nova.compute.manager [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] Took 0.03 seconds to destroy the instance on the hypervisor. [ 703.489355] env[62096]: DEBUG oslo.service.loopingcall [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 703.489658] env[62096]: DEBUG nova.compute.manager [-] [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 703.489749] env[62096]: DEBUG nova.network.neutron [-] [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 703.510851] env[62096]: DEBUG nova.network.neutron [-] [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 703.511395] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg ae3619690fb14d7d92453c350b153379 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 703.521258] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ae3619690fb14d7d92453c350b153379 [ 703.571467] env[62096]: DEBUG nova.compute.manager [req-d0ea7f72-81ac-4955-8544-1f1e6c377f6f req-d2ddbea9-a728-470c-9dfc-4fe34cecf851 service nova] [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] Received event network-changed-d00a01dc-5cbe-464a-939f-07ed16a2758d {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 703.571649] env[62096]: DEBUG nova.compute.manager [req-d0ea7f72-81ac-4955-8544-1f1e6c377f6f req-d2ddbea9-a728-470c-9dfc-4fe34cecf851 service nova] [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] Refreshing instance network info cache due to event network-changed-d00a01dc-5cbe-464a-939f-07ed16a2758d. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 703.571864] env[62096]: DEBUG oslo_concurrency.lockutils [req-d0ea7f72-81ac-4955-8544-1f1e6c377f6f req-d2ddbea9-a728-470c-9dfc-4fe34cecf851 service nova] Acquiring lock "refresh_cache-ac8746f4-95b0-440e-bc3e-a92457ed664f" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 703.572064] env[62096]: DEBUG oslo_concurrency.lockutils [req-d0ea7f72-81ac-4955-8544-1f1e6c377f6f req-d2ddbea9-a728-470c-9dfc-4fe34cecf851 service nova] Acquired lock "refresh_cache-ac8746f4-95b0-440e-bc3e-a92457ed664f" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 703.572230] env[62096]: DEBUG nova.network.neutron [req-d0ea7f72-81ac-4955-8544-1f1e6c377f6f req-d2ddbea9-a728-470c-9dfc-4fe34cecf851 service nova] [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] Refreshing network info cache for port d00a01dc-5cbe-464a-939f-07ed16a2758d {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 703.572638] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-d0ea7f72-81ac-4955-8544-1f1e6c377f6f req-d2ddbea9-a728-470c-9dfc-4fe34cecf851 service nova] Expecting reply to msg d5ade10abd344847b600529be4e7c26c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 703.579785] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d5ade10abd344847b600529be4e7c26c [ 703.674352] env[62096]: DEBUG nova.scheduler.client.report [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 703.677113] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Expecting reply to msg 
736be391b1e746749c782221745765a1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 703.688826] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 736be391b1e746749c782221745765a1 [ 703.891206] env[62096]: DEBUG nova.network.neutron [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 703.891868] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Expecting reply to msg 226d7e4ea986498ba5ca16ace201fd15 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 703.901910] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 226d7e4ea986498ba5ca16ace201fd15 [ 704.013959] env[62096]: DEBUG nova.network.neutron [-] [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 704.014464] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg ac1bff42657d4208a49a85a7d4ac93b8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 704.023063] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac1bff42657d4208a49a85a7d4ac93b8 [ 704.090368] env[62096]: DEBUG nova.network.neutron [req-d0ea7f72-81ac-4955-8544-1f1e6c377f6f req-d2ddbea9-a728-470c-9dfc-4fe34cecf851 service nova] [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 704.175104] env[62096]: DEBUG nova.network.neutron [req-d0ea7f72-81ac-4955-8544-1f1e6c377f6f req-d2ddbea9-a728-470c-9dfc-4fe34cecf851 service nova] [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 704.175831] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-d0ea7f72-81ac-4955-8544-1f1e6c377f6f req-d2ddbea9-a728-470c-9dfc-4fe34cecf851 service nova] Expecting reply to msg abaa947ff422463d8291deac9797eb47 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 704.183531] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.921s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 704.183531] env[62096]: ERROR nova.compute.manager [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 3cd450aa-2fc5-4d09-a571-2b6c9545419d, please check neutron logs for more information. 
[ 704.183531] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] Traceback (most recent call last): [ 704.183531] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 704.183531] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] self.driver.spawn(context, instance, image_meta, [ 704.183531] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 704.183531] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 704.183531] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 704.183531] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] vm_ref = self.build_virtual_machine(instance, [ 704.184096] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 704.184096] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] vif_infos = vmwarevif.get_vif_info(self._session, [ 704.184096] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 704.184096] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] for vif in network_info: [ 704.184096] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 704.184096] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] return self._sync_wrapper(fn, *args, **kwargs) [ 704.184096] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 704.184096] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] self.wait() [ 704.184096] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 704.184096] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] self[:] = self._gt.wait() [ 704.184096] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 704.184096] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] return self._exit_event.wait() [ 704.184096] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 704.184629] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] result = hub.switch() [ 704.184629] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
704.184629] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] return self.greenlet.switch() [ 704.184629] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 704.184629] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] result = function(*args, **kwargs) [ 704.184629] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 704.184629] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] return func(*args, **kwargs) [ 704.184629] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 704.184629] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] raise e [ 704.184629] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 704.184629] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] nwinfo = self.network_api.allocate_for_instance( [ 704.184629] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 704.184629] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] created_port_ids = self._update_ports_for_instance( [ 704.185140] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 704.185140] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] with excutils.save_and_reraise_exception(): [ 704.185140] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 704.185140] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] self.force_reraise() [ 704.185140] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 704.185140] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] raise self.value [ 704.185140] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 704.185140] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] updated_port = self._update_port( [ 704.185140] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 704.185140] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] _ensure_no_port_binding_failure(port) [ 704.185140] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 704.185140] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] raise exception.PortBindingFailed(port_id=port['id']) [ 704.185628] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] nova.exception.PortBindingFailed: Binding failed for port 3cd450aa-2fc5-4d09-a571-2b6c9545419d, please check neutron logs for more information. [ 704.185628] env[62096]: ERROR nova.compute.manager [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] [ 704.185628] env[62096]: DEBUG nova.compute.utils [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] Binding failed for port 3cd450aa-2fc5-4d09-a571-2b6c9545419d, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 704.185628] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.756s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 704.185628] env[62096]: INFO nova.compute.claims [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] [instance: 7d62e302-8080-4699-b88d-cb29031e6707] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 704.187999] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Expecting reply to msg 27e338eafafc43f29b77960c3a031ea6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 704.190486] env[62096]: DEBUG nova.compute.manager [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] Build of instance 641e5b3a-15ef-4d78-8339-7a26494038d9 was re-scheduled: Binding failed for port 3cd450aa-2fc5-4d09-a571-2b6c9545419d, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 704.190972] env[62096]: DEBUG nova.compute.manager [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 704.191200] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Acquiring lock "refresh_cache-641e5b3a-15ef-4d78-8339-7a26494038d9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 704.191347] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Acquired lock "refresh_cache-641e5b3a-15ef-4d78-8339-7a26494038d9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 704.191500] env[62096]: DEBUG nova.network.neutron [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 704.191869] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Expecting reply to msg b46a13e7d7ad4b9fac815a1196ca59f4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 704.193131] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg abaa947ff422463d8291deac9797eb47 [ 704.200677] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b46a13e7d7ad4b9fac815a1196ca59f4 [ 704.238928] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 27e338eafafc43f29b77960c3a031ea6 [ 704.394823] env[62096]: INFO nova.compute.manager [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: e5c75298-b9ef-4e28-a038-b55d8e198539] Took 1.02 seconds to deallocate network for instance. [ 704.396550] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Expecting reply to msg 3116645fdd864e96ac731020c8518407 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 704.429775] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3116645fdd864e96ac731020c8518407 [ 704.516267] env[62096]: INFO nova.compute.manager [-] [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] Took 1.03 seconds to deallocate network for instance. 
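The PortBindingFailed in the traceback above is raised from _ensure_no_port_binding_failure in /opt/stack/nova/nova/network/neutron.py (line 294 in this tree). A minimal, self-contained sketch of that check, assuming Neutron reports a failed binding by setting the port's binding:vif_type to 'binding_failed' (the constant and the exception class below are illustrative stand-ins, not code copied from the tree):

    # Illustrative sketch of the check behind "raise exception.PortBindingFailed(...)"
    # seen at neutron.py:294 in the traceback above.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed Neutron value for a failed binding

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__("Binding failed for port %s, please check neutron "
                             "logs for more information." % port_id)

    def _ensure_no_port_binding_failure(port):
        # Neutron has already tried (and failed) to bind the port; surface that
        # to the compute manager so the build can be aborted and re-scheduled.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

Because _update_port runs this check on every port it touches, the exception propagates up through allocate_for_instance, the resource claim is aborted, and the build is re-scheduled, which matches the "was re-scheduled: Binding failed for port 3cd450aa-2fc5-4d09-a571-2b6c9545419d" entry above.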
[ 704.518766] env[62096]: DEBUG nova.compute.claims [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 704.518949] env[62096]: DEBUG oslo_concurrency.lockutils [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 704.678888] env[62096]: DEBUG oslo_concurrency.lockutils [req-d0ea7f72-81ac-4955-8544-1f1e6c377f6f req-d2ddbea9-a728-470c-9dfc-4fe34cecf851 service nova] Releasing lock "refresh_cache-ac8746f4-95b0-440e-bc3e-a92457ed664f" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 704.679192] env[62096]: DEBUG nova.compute.manager [req-d0ea7f72-81ac-4955-8544-1f1e6c377f6f req-d2ddbea9-a728-470c-9dfc-4fe34cecf851 service nova] [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] Received event network-vif-deleted-d00a01dc-5cbe-464a-939f-07ed16a2758d {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 704.696266] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Expecting reply to msg 06870c33f8c6461d86c265da1accef57 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 704.713509] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 06870c33f8c6461d86c265da1accef57 [ 704.716831] env[62096]: DEBUG nova.network.neutron [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 704.834547] env[62096]: DEBUG nova.network.neutron [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 704.835069] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Expecting reply to msg fc6b5de5964c4320a56a74d081091136 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 704.844118] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fc6b5de5964c4320a56a74d081091136 [ 704.900722] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Expecting reply to msg bd330f041cfa44bd8b5fb0ad5d93733a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 704.944920] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bd330f041cfa44bd8b5fb0ad5d93733a [ 705.344463] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Releasing lock "refresh_cache-641e5b3a-15ef-4d78-8339-7a26494038d9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 705.344699] env[62096]: DEBUG nova.compute.manager [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 705.344889] env[62096]: DEBUG nova.compute.manager [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 705.345078] env[62096]: DEBUG nova.network.neutron [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 705.360168] env[62096]: DEBUG nova.network.neutron [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 705.360761] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Expecting reply to msg 78b2dd9d7cbd43318abbd8f768660fa2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 705.371756] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 78b2dd9d7cbd43318abbd8f768660fa2 [ 705.435765] env[62096]: INFO nova.scheduler.client.report [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Deleted allocations for instance e5c75298-b9ef-4e28-a038-b55d8e198539 [ 705.453812] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Expecting reply to msg be13338ff174453db756a4b0ae92da06 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 705.513020] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg be13338ff174453db756a4b0ae92da06 [ 705.671345] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8332b306-5884-4b90-9ffd-68c5dfb8da3b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.680262] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-678c5dc9-3edb-462f-ba02-7b198670002d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.712189] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19e1c090-a284-41e6-a2e1-2ff935f58964 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.720379] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f44015c5-211f-4768-a0fb-18ca86cce257 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.735052] env[62096]: DEBUG nova.compute.provider_tree [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 705.735731] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Expecting reply to msg 30fdfc1117554d9a926fd7bdc2a732ac in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 705.743104] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 30fdfc1117554d9a926fd7bdc2a732ac [ 705.869642] env[62096]: DEBUG nova.network.neutron [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 705.870367] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Expecting reply to msg e2f605f2d3b2427bb47a34fc32e8df88 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 705.889251] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e2f605f2d3b2427bb47a34fc32e8df88 [ 705.957095] env[62096]: DEBUG oslo_concurrency.lockutils [None req-95f70777-dc9c-4d7d-99c8-605de6557417 tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Lock "e5c75298-b9ef-4e28-a038-b55d8e198539" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 123.613s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 705.958258] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Expecting reply to msg 2f747d42104141ddad7034f7bc7df4d6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 705.967657] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2f747d42104141ddad7034f7bc7df4d6 [ 706.238095] env[62096]: DEBUG nova.scheduler.client.report [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 706.240571] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Expecting reply to msg 8793685663e048418cdf2d638bf9c0ba in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 706.279283] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8793685663e048418cdf2d638bf9c0ba [ 706.373498] env[62096]: INFO nova.compute.manager [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] [instance: 641e5b3a-15ef-4d78-8339-7a26494038d9] Took 1.03 seconds to deallocate network for instance. 
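The "Inventory has not changed ... based on inventory data" entries above report raw totals; placement derives usable capacity per resource class as (total - reserved) * allocation_ratio. A quick check of the numbers in this log (plain Python for illustration, not Nova code):

    # Effective capacity implied by the inventory reported for provider
    # 6eefe13c-ab55-4c03-987f-47a62756c3b3 above: (total - reserved) * allocation_ratio.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0

So the node can claim up to 192 vCPUs, roughly 191 GiB of RAM and 400 GiB of disk, which is consistent with the m1.nano claims above succeeding as soon as the compute_resources lock is obtained; the long "waited" times are lock contention, not capacity pressure.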
[ 706.375259] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Expecting reply to msg da98ba3be1424e218178a8dbc404b22d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 706.407286] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg da98ba3be1424e218178a8dbc404b22d [ 706.460597] env[62096]: DEBUG nova.compute.manager [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 706.462412] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Expecting reply to msg de979ff378b6488f90c9d2d1fd2e17ac in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 706.516819] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg de979ff378b6488f90c9d2d1fd2e17ac [ 706.745988] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.562s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 706.746547] env[62096]: DEBUG nova.compute.manager [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] [instance: 7d62e302-8080-4699-b88d-cb29031e6707] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 706.748329] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Expecting reply to msg 0ea410900d1e4ae4b0042691bab8b956 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 706.749360] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.742s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 706.759486] env[62096]: INFO nova.compute.claims [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 706.759486] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Expecting reply to msg ff750b36affd414091ed27cd03b5dcae in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 706.788220] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ff750b36affd414091ed27cd03b5dcae [ 706.798664] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0ea410900d1e4ae4b0042691bab8b956 [ 706.879209] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Expecting reply to msg c93abafda07f413f9b565aa7aedb0616 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 706.919637] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c93abafda07f413f9b565aa7aedb0616 [ 706.986726] env[62096]: DEBUG oslo_concurrency.lockutils [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 707.255610] env[62096]: DEBUG nova.compute.utils [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 707.256369] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Expecting reply to msg 9366de6d4876415db6cf572c345faeee in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 707.258325] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Expecting reply to msg db2a04f1da684c9b8af55be2dc498720 in queue 
reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 707.259103] env[62096]: DEBUG nova.compute.manager [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] [instance: 7d62e302-8080-4699-b88d-cb29031e6707] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 707.259270] env[62096]: DEBUG nova.network.neutron [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] [instance: 7d62e302-8080-4699-b88d-cb29031e6707] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 707.268412] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg db2a04f1da684c9b8af55be2dc498720 [ 707.275557] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9366de6d4876415db6cf572c345faeee [ 707.310493] env[62096]: DEBUG nova.policy [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '85fd886ffc1b4cb1be861013c9b47a0b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '06243c61c0054268a5ec58cac55a292c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 707.401686] env[62096]: INFO nova.scheduler.client.report [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Deleted allocations for instance 641e5b3a-15ef-4d78-8339-7a26494038d9 [ 707.408627] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Expecting reply to msg e75fed53c1e247889e2e0432b332dc7b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 707.430637] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e75fed53c1e247889e2e0432b332dc7b [ 707.672750] env[62096]: DEBUG nova.network.neutron [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] [instance: 7d62e302-8080-4699-b88d-cb29031e6707] Successfully created port: 6421eacc-4575-4a31-a454-a75f25e03132 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 707.772113] env[62096]: DEBUG nova.compute.manager [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] [instance: 7d62e302-8080-4699-b88d-cb29031e6707] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 707.772113] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Expecting reply to msg a42b696ebf2845ce9385237d7ed74054 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 707.799716] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a42b696ebf2845ce9385237d7ed74054 [ 707.913184] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cdd99b9e-14f8-4543-8c76-d91f63d6fc9c tempest-ServerGroupTestJSON-613982516 tempest-ServerGroupTestJSON-613982516-project-member] Lock "641e5b3a-15ef-4d78-8339-7a26494038d9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 124.866s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 707.913795] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Expecting reply to msg ec6eb860af03497196e61c9841d04241 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 707.926691] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ec6eb860af03497196e61c9841d04241 [ 708.112250] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0608bd4f-3f20-4f54-9d53-4de769e196bd {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.119372] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7324d137-9cc4-4afb-a947-8f315cb21cca {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.151056] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be7f5c5f-4cb9-4ab5-94e8-b8d7c0466879 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.159153] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f5fc1fa-f459-444b-a2f7-bf7dc2c6e15c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.172094] env[62096]: DEBUG nova.compute.provider_tree [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 708.172602] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Expecting reply to msg 04cd5579f9b740e58ff764acd5429154 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 708.180044] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 04cd5579f9b740e58ff764acd5429154 [ 708.271104] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 
tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Expecting reply to msg b89016a405344e219495e7450099ef0d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 708.305695] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b89016a405344e219495e7450099ef0d [ 708.416188] env[62096]: DEBUG nova.compute.manager [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] [instance: d38be540-9cd0-428a-b10d-313d2d464b25] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 708.418083] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Expecting reply to msg e2f71f2156fb49f5be2d8ea4a0be8782 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 708.472440] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e2f71f2156fb49f5be2d8ea4a0be8782 [ 708.678655] env[62096]: DEBUG nova.scheduler.client.report [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 708.678655] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Expecting reply to msg cb06b29faa1f48a0bf83f00fd4afb769 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 708.689383] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cb06b29faa1f48a0bf83f00fd4afb769 [ 708.775881] env[62096]: DEBUG nova.compute.manager [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] [instance: 7d62e302-8080-4699-b88d-cb29031e6707] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 708.800315] env[62096]: DEBUG nova.virt.hardware [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 708.800545] env[62096]: DEBUG nova.virt.hardware [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 708.800697] env[62096]: DEBUG nova.virt.hardware [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 708.800866] env[62096]: DEBUG nova.virt.hardware [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 708.801014] env[62096]: DEBUG nova.virt.hardware [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 708.801154] env[62096]: DEBUG nova.virt.hardware [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 708.801352] env[62096]: DEBUG nova.virt.hardware [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 708.801503] env[62096]: DEBUG nova.virt.hardware [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 708.801657] env[62096]: DEBUG nova.virt.hardware [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 708.801811] env[62096]: DEBUG nova.virt.hardware [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 708.802010] env[62096]: DEBUG nova.virt.hardware [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 708.803006] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09708024-14dd-4fa6-a075-acaba21aac27 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.811110] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2236a1a-58d4-425c-8af1-1f6a27037aa0 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.938980] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 709.121034] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Acquiring lock "46232e88-ad63-42bc-bf51-2a0758e6ec3a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 709.121034] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Lock "46232e88-ad63-42bc-bf51-2a0758e6ec3a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 709.180275] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.431s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 709.180833] env[62096]: DEBUG nova.compute.manager [None 
req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] Start building networks asynchronously for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 709.182810] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Expecting reply to msg 9fbc0fd48f11471a81032282ff03fabd in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 709.184433] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 31.517s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 709.186378] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg 5b7cb96d00c44eec977eda918ba8c482 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 709.216692] env[62096]: DEBUG nova.compute.manager [req-fb1a5860-3241-4205-9806-fdd1da71d81a req-4560c70e-85ca-4140-886a-86f99fb0733f service nova] [instance: 7d62e302-8080-4699-b88d-cb29031e6707] Received event network-changed-6421eacc-4575-4a31-a454-a75f25e03132 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 709.216874] env[62096]: DEBUG nova.compute.manager [req-fb1a5860-3241-4205-9806-fdd1da71d81a req-4560c70e-85ca-4140-886a-86f99fb0733f service nova] [instance: 7d62e302-8080-4699-b88d-cb29031e6707] Refreshing instance network info cache due to event network-changed-6421eacc-4575-4a31-a454-a75f25e03132. 
{{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 709.217080] env[62096]: DEBUG oslo_concurrency.lockutils [req-fb1a5860-3241-4205-9806-fdd1da71d81a req-4560c70e-85ca-4140-886a-86f99fb0733f service nova] Acquiring lock "refresh_cache-7d62e302-8080-4699-b88d-cb29031e6707" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 709.217729] env[62096]: DEBUG oslo_concurrency.lockutils [req-fb1a5860-3241-4205-9806-fdd1da71d81a req-4560c70e-85ca-4140-886a-86f99fb0733f service nova] Acquired lock "refresh_cache-7d62e302-8080-4699-b88d-cb29031e6707" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.217940] env[62096]: DEBUG nova.network.neutron [req-fb1a5860-3241-4205-9806-fdd1da71d81a req-4560c70e-85ca-4140-886a-86f99fb0733f service nova] [instance: 7d62e302-8080-4699-b88d-cb29031e6707] Refreshing network info cache for port 6421eacc-4575-4a31-a454-a75f25e03132 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 709.218387] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-fb1a5860-3241-4205-9806-fdd1da71d81a req-4560c70e-85ca-4140-886a-86f99fb0733f service nova] Expecting reply to msg 20aa81872ff94b67a5b307340827bc13 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 709.220144] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5b7cb96d00c44eec977eda918ba8c482 [ 709.233439] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9fbc0fd48f11471a81032282ff03fabd [ 709.234349] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 20aa81872ff94b67a5b307340827bc13 [ 709.259495] env[62096]: ERROR nova.compute.manager [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 6421eacc-4575-4a31-a454-a75f25e03132, please check neutron logs for more information. 
[ 709.259495] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 709.259495] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 709.259495] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 709.259495] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 709.259495] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 709.259495] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 709.259495] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 709.259495] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 709.259495] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 709.259495] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 709.259495] env[62096]: ERROR nova.compute.manager raise self.value [ 709.259495] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 709.259495] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 709.259495] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 709.259495] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 709.260096] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 709.260096] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 709.260096] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 6421eacc-4575-4a31-a454-a75f25e03132, please check neutron logs for more information. 
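The network-setup traceback above and the spawn traceback that follows for instance 7d62e302-8080-4699-b88d-cb29031e6707 show the same underlying failure twice because network allocation runs on a green thread (_allocate_network_async) while the instance's network_info is a lazy wrapper: get_vif_info's iteration goes through __iter__/_sync_wrapper, waits on that green thread, and re-raises its exception. A simplified, self-contained illustration of that pattern, assuming eventlet is available (the class and function names here are illustrative, not Nova's NetworkInfoAsyncWrapper):

    # Deferred network-info pattern: allocation runs on a green thread and any
    # failure is re-raised the first time the result is actually consumed.
    import eventlet

    class PortBindingFailed(Exception):
        pass

    class AsyncNetworkInfo:
        def __init__(self, allocate_fn, *args):
            self._gt = eventlet.spawn(allocate_fn, *args)  # start allocation in the background
            self._result = None

        def wait(self):
            if self._result is None:
                self._result = self._gt.wait()  # re-raises the green thread's exception
            return self._result

        def __iter__(self):
            return iter(self.wait())

    def allocate_for_instance(port_id):
        # stands in for network_api.allocate_for_instance() hitting a failed binding
        raise PortBindingFailed('Binding failed for port %s' % port_id)

    network_info = AsyncNetworkInfo(allocate_for_instance,
                                    '6421eacc-4575-4a31-a454-a75f25e03132')
    try:
        for vif in network_info:   # corresponds to get_vif_info's "for vif in network_info"
            pass
    except PortBindingFailed as exc:
        print(exc)

This is why the same PortBindingFailed for port 6421eacc-4575-4a31-a454-a75f25e03132 reappears in the spawn traceback below, after which the compute manager terminates the instance and releases its resources.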
[ 709.260096] env[62096]: ERROR nova.compute.manager [ 709.260096] env[62096]: Traceback (most recent call last): [ 709.260096] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 709.260096] env[62096]: listener.cb(fileno) [ 709.260096] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 709.260096] env[62096]: result = function(*args, **kwargs) [ 709.260096] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 709.260096] env[62096]: return func(*args, **kwargs) [ 709.260096] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 709.260096] env[62096]: raise e [ 709.260096] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 709.260096] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 709.260096] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 709.260096] env[62096]: created_port_ids = self._update_ports_for_instance( [ 709.260096] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 709.260096] env[62096]: with excutils.save_and_reraise_exception(): [ 709.260096] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 709.260096] env[62096]: self.force_reraise() [ 709.260096] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 709.260096] env[62096]: raise self.value [ 709.260096] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 709.260096] env[62096]: updated_port = self._update_port( [ 709.260096] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 709.260096] env[62096]: _ensure_no_port_binding_failure(port) [ 709.260096] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 709.260096] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 709.260996] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 6421eacc-4575-4a31-a454-a75f25e03132, please check neutron logs for more information. [ 709.260996] env[62096]: Removing descriptor: 16 [ 709.260996] env[62096]: ERROR nova.compute.manager [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] [instance: 7d62e302-8080-4699-b88d-cb29031e6707] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 6421eacc-4575-4a31-a454-a75f25e03132, please check neutron logs for more information. 
[ 709.260996] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] Traceback (most recent call last): [ 709.260996] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 709.260996] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] yield resources [ 709.260996] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 709.260996] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] self.driver.spawn(context, instance, image_meta, [ 709.260996] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 709.260996] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] self._vmops.spawn(context, instance, image_meta, injected_files, [ 709.260996] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 709.260996] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] vm_ref = self.build_virtual_machine(instance, [ 709.261385] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 709.261385] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] vif_infos = vmwarevif.get_vif_info(self._session, [ 709.261385] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 709.261385] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] for vif in network_info: [ 709.261385] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 709.261385] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] return self._sync_wrapper(fn, *args, **kwargs) [ 709.261385] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 709.261385] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] self.wait() [ 709.261385] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 709.261385] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] self[:] = self._gt.wait() [ 709.261385] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 709.261385] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] return self._exit_event.wait() [ 709.261385] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 709.261784] env[62096]: ERROR 
nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] result = hub.switch() [ 709.261784] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 709.261784] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] return self.greenlet.switch() [ 709.261784] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 709.261784] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] result = function(*args, **kwargs) [ 709.261784] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 709.261784] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] return func(*args, **kwargs) [ 709.261784] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 709.261784] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] raise e [ 709.261784] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 709.261784] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] nwinfo = self.network_api.allocate_for_instance( [ 709.261784] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 709.261784] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] created_port_ids = self._update_ports_for_instance( [ 709.262226] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 709.262226] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] with excutils.save_and_reraise_exception(): [ 709.262226] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 709.262226] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] self.force_reraise() [ 709.262226] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 709.262226] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] raise self.value [ 709.262226] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 709.262226] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] updated_port = self._update_port( [ 709.262226] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 709.262226] 
env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] _ensure_no_port_binding_failure(port) [ 709.262226] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 709.262226] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] raise exception.PortBindingFailed(port_id=port['id']) [ 709.262616] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] nova.exception.PortBindingFailed: Binding failed for port 6421eacc-4575-4a31-a454-a75f25e03132, please check neutron logs for more information. [ 709.262616] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] [ 709.262616] env[62096]: INFO nova.compute.manager [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] [instance: 7d62e302-8080-4699-b88d-cb29031e6707] Terminating instance [ 709.263641] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Acquiring lock "refresh_cache-7d62e302-8080-4699-b88d-cb29031e6707" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 709.686816] env[62096]: DEBUG nova.compute.utils [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 709.687456] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Expecting reply to msg 446a9e97a4094e16a5284ca37c7775ac in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 709.688988] env[62096]: DEBUG nova.compute.manager [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 709.689162] env[62096]: DEBUG nova.network.neutron [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 709.699419] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 446a9e97a4094e16a5284ca37c7775ac [ 709.739876] env[62096]: DEBUG nova.network.neutron [req-fb1a5860-3241-4205-9806-fdd1da71d81a req-4560c70e-85ca-4140-886a-86f99fb0733f service nova] [instance: 7d62e302-8080-4699-b88d-cb29031e6707] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 709.815192] env[62096]: DEBUG nova.network.neutron [req-fb1a5860-3241-4205-9806-fdd1da71d81a req-4560c70e-85ca-4140-886a-86f99fb0733f service nova] [instance: 7d62e302-8080-4699-b88d-cb29031e6707] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 709.815706] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-fb1a5860-3241-4205-9806-fdd1da71d81a req-4560c70e-85ca-4140-886a-86f99fb0733f service nova] Expecting reply to msg 8381e951568b4e8e96c75ed6bccdd5a3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 709.824080] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8381e951568b4e8e96c75ed6bccdd5a3 [ 709.884031] env[62096]: DEBUG nova.policy [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '62bb9c82091f4c90b8e2d39229d64b3d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cf633d5ff4dc4653a290929591e1a150', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 710.075031] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4297b675-cc6b-4a71-99da-96ffd291d708 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.082619] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96222fdb-ae03-418b-b356-d507dccabd82 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.117231] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f6cef63-db1c-4fe3-8662-c16e49157e30 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.125226] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e311ddc-16aa-4ba9-bbca-3658379d0054 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.140788] env[62096]: DEBUG nova.compute.provider_tree [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 710.141285] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg b657b1d6ae1744fa88c0ab121ac7e3cb in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 710.148459] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b657b1d6ae1744fa88c0ab121ac7e3cb [ 710.190417] env[62096]: DEBUG nova.compute.manager [None 
req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] Start building block device mappings for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 710.192537] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Expecting reply to msg e8bb8d16f9094daaab0133755303e73d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 710.231722] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e8bb8d16f9094daaab0133755303e73d [ 710.324540] env[62096]: DEBUG oslo_concurrency.lockutils [req-fb1a5860-3241-4205-9806-fdd1da71d81a req-4560c70e-85ca-4140-886a-86f99fb0733f service nova] Releasing lock "refresh_cache-7d62e302-8080-4699-b88d-cb29031e6707" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 710.324960] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Acquired lock "refresh_cache-7d62e302-8080-4699-b88d-cb29031e6707" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 710.325369] env[62096]: DEBUG nova.network.neutron [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] [instance: 7d62e302-8080-4699-b88d-cb29031e6707] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 710.325806] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Expecting reply to msg 6c21ec533ddc4214b8688b41ef745516 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 710.332385] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6c21ec533ddc4214b8688b41ef745516 [ 710.570726] env[62096]: DEBUG nova.network.neutron [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] Successfully created port: 04372f13-66f6-4584-8b4a-3d7edb00fb9a {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 710.643998] env[62096]: DEBUG nova.scheduler.client.report [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 710.646461] env[62096]: INFO 
oslo_messaging._drivers.amqpdriver [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg 50d7ed4e69774bb28f1bf50c4dc5b9d4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 710.658348] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 50d7ed4e69774bb28f1bf50c4dc5b9d4 [ 710.696869] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Expecting reply to msg 3f36fe26100f4998b712d044743661b1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 710.733690] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3f36fe26100f4998b712d044743661b1 [ 710.842711] env[62096]: DEBUG nova.network.neutron [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] [instance: 7d62e302-8080-4699-b88d-cb29031e6707] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 710.938217] env[62096]: DEBUG nova.network.neutron [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] [instance: 7d62e302-8080-4699-b88d-cb29031e6707] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 710.938753] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Expecting reply to msg 6a23d20f9f1d43cb92f612f5541ea72f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 710.948165] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6a23d20f9f1d43cb92f612f5541ea72f [ 711.155108] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.966s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 711.155108] env[62096]: ERROR nova.compute.manager [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 374ca884-8e77-4568-8667-e124e6df4c75] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7158528a-233c-4f68-8cce-fcc21e6131a0, please check neutron logs for more information. 
[ 711.155108] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] Traceback (most recent call last): [ 711.155108] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 711.155108] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] self.driver.spawn(context, instance, image_meta, [ 711.155108] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 711.155108] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] self._vmops.spawn(context, instance, image_meta, injected_files, [ 711.155108] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 711.155108] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] vm_ref = self.build_virtual_machine(instance, [ 711.155768] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 711.155768] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] vif_infos = vmwarevif.get_vif_info(self._session, [ 711.155768] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 711.155768] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] for vif in network_info: [ 711.155768] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 711.155768] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] return self._sync_wrapper(fn, *args, **kwargs) [ 711.155768] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 711.155768] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] self.wait() [ 711.155768] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 711.155768] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] self[:] = self._gt.wait() [ 711.155768] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 711.155768] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] return self._exit_event.wait() [ 711.155768] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 711.156321] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] current.throw(*self._exc) [ 711.156321] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
711.156321] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] result = function(*args, **kwargs) [ 711.156321] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 711.156321] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] return func(*args, **kwargs) [ 711.156321] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 711.156321] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] raise e [ 711.156321] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 711.156321] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] nwinfo = self.network_api.allocate_for_instance( [ 711.156321] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 711.156321] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] created_port_ids = self._update_ports_for_instance( [ 711.156321] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 711.156321] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] with excutils.save_and_reraise_exception(): [ 711.156857] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 711.156857] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] self.force_reraise() [ 711.156857] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 711.156857] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] raise self.value [ 711.156857] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 711.156857] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] updated_port = self._update_port( [ 711.156857] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 711.156857] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] _ensure_no_port_binding_failure(port) [ 711.156857] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 711.156857] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] raise exception.PortBindingFailed(port_id=port['id']) [ 711.156857] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] nova.exception.PortBindingFailed: Binding failed for 
port 7158528a-233c-4f68-8cce-fcc21e6131a0, please check neutron logs for more information. [ 711.156857] env[62096]: ERROR nova.compute.manager [instance: 374ca884-8e77-4568-8667-e124e6df4c75] [ 711.157247] env[62096]: DEBUG nova.compute.utils [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 374ca884-8e77-4568-8667-e124e6df4c75] Binding failed for port 7158528a-233c-4f68-8cce-fcc21e6131a0, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 711.157247] env[62096]: DEBUG oslo_concurrency.lockutils [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.455s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 711.158714] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg f0673dfb08544307aee9a21250f38c3f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 711.161537] env[62096]: DEBUG nova.compute.manager [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 374ca884-8e77-4568-8667-e124e6df4c75] Build of instance 374ca884-8e77-4568-8667-e124e6df4c75 was re-scheduled: Binding failed for port 7158528a-233c-4f68-8cce-fcc21e6131a0, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 711.161537] env[62096]: DEBUG nova.compute.manager [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 374ca884-8e77-4568-8667-e124e6df4c75] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 711.161537] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Acquiring lock "refresh_cache-374ca884-8e77-4568-8667-e124e6df4c75" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 711.161537] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Acquired lock "refresh_cache-374ca884-8e77-4568-8667-e124e6df4c75" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.161768] env[62096]: DEBUG nova.network.neutron [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 374ca884-8e77-4568-8667-e124e6df4c75] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 711.161768] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg 4381096d39534e2a85ca3820c8f9ee03 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 711.167812] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4381096d39534e2a85ca3820c8f9ee03 [ 711.194786] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f0673dfb08544307aee9a21250f38c3f [ 711.200657] env[62096]: DEBUG nova.compute.manager [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 711.226140] env[62096]: DEBUG nova.virt.hardware [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 711.226140] env[62096]: DEBUG nova.virt.hardware [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 711.226140] env[62096]: DEBUG nova.virt.hardware [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 711.226408] env[62096]: DEBUG nova.virt.hardware [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 711.226408] env[62096]: DEBUG nova.virt.hardware [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 711.226408] env[62096]: DEBUG nova.virt.hardware [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 711.226408] env[62096]: DEBUG nova.virt.hardware [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 711.226408] env[62096]: DEBUG nova.virt.hardware [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 711.226558] env[62096]: DEBUG nova.virt.hardware [None 
req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 711.226558] env[62096]: DEBUG nova.virt.hardware [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 711.226941] env[62096]: DEBUG nova.virt.hardware [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 711.228146] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fc4f553-35fb-4c3a-b57b-1f71f4cbcbf7 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.237786] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04a0a01b-b2c3-42cf-9523-07618e8f568d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.415199] env[62096]: DEBUG nova.compute.manager [req-d64dfd72-ba07-4d15-8eaa-7926fb44b801 req-0dfd9b12-770d-4a09-853b-a8b03879e343 service nova] [instance: 7d62e302-8080-4699-b88d-cb29031e6707] Received event network-vif-deleted-6421eacc-4575-4a31-a454-a75f25e03132 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 711.441532] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Releasing lock "refresh_cache-7d62e302-8080-4699-b88d-cb29031e6707" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 711.441828] env[62096]: DEBUG nova.compute.manager [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] [instance: 7d62e302-8080-4699-b88d-cb29031e6707] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 711.442026] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] [instance: 7d62e302-8080-4699-b88d-cb29031e6707] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 711.442343] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e9358dcc-402c-4835-9436-59f2466b86a0 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.454716] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1023dd36-3332-414f-a12e-0956e2cb879c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.479598] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] [instance: 7d62e302-8080-4699-b88d-cb29031e6707] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7d62e302-8080-4699-b88d-cb29031e6707 could not be found. [ 711.479598] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] [instance: 7d62e302-8080-4699-b88d-cb29031e6707] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 711.479598] env[62096]: INFO nova.compute.manager [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] [instance: 7d62e302-8080-4699-b88d-cb29031e6707] Took 0.04 seconds to destroy the instance on the hypervisor. [ 711.479598] env[62096]: DEBUG oslo.service.loopingcall [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 711.479598] env[62096]: DEBUG nova.compute.manager [-] [instance: 7d62e302-8080-4699-b88d-cb29031e6707] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 711.479598] env[62096]: DEBUG nova.network.neutron [-] [instance: 7d62e302-8080-4699-b88d-cb29031e6707] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 711.671270] env[62096]: DEBUG nova.network.neutron [-] [instance: 7d62e302-8080-4699-b88d-cb29031e6707] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 711.671824] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 8eedf58d471a4e4dbe70469681a02360 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 711.678633] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8eedf58d471a4e4dbe70469681a02360 [ 711.853784] env[62096]: DEBUG nova.network.neutron [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 374ca884-8e77-4568-8667-e124e6df4c75] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 712.083514] env[62096]: DEBUG nova.network.neutron [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 374ca884-8e77-4568-8667-e124e6df4c75] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 712.084128] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg 8c5825cf16a34b47a1dd55da06c9a7e1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 712.092719] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8c5825cf16a34b47a1dd55da06c9a7e1 [ 712.139806] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50e349fb-3caf-467d-bd51-1a622f97a543 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.147726] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ac7dffe-6cd2-416c-ab23-9e89ea091ca8 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.176667] env[62096]: DEBUG nova.network.neutron [-] [instance: 7d62e302-8080-4699-b88d-cb29031e6707] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 712.177111] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 8004e36e3f504a3b88c31225d7f45c1f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 712.178584] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-596504e7-2f3e-4c47-a653-92aecdc54461 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.185980] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa51123b-c2a7-4627-b61e-8fbdad82b799 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.191929] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8004e36e3f504a3b88c31225d7f45c1f [ 712.201623] env[62096]: DEBUG nova.compute.provider_tree [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Inventory has not changed in ProviderTree for provider: 
6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 712.202164] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg b684af8b718d4c2e8e5537588ccb8c3d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 712.212413] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b684af8b718d4c2e8e5537588ccb8c3d [ 712.250140] env[62096]: ERROR nova.compute.manager [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 04372f13-66f6-4584-8b4a-3d7edb00fb9a, please check neutron logs for more information. [ 712.250140] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 712.250140] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 712.250140] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 712.250140] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 712.250140] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 712.250140] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 712.250140] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 712.250140] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 712.250140] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 712.250140] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 712.250140] env[62096]: ERROR nova.compute.manager raise self.value [ 712.250140] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 712.250140] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 712.250140] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 712.250140] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 712.250720] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 712.250720] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 712.250720] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 04372f13-66f6-4584-8b4a-3d7edb00fb9a, please check neutron logs for more information. 
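The tracebacks above all follow the same path: _update_port() calls _ensure_no_port_binding_failure(port), which raises nova.exception.PortBindingFailed, and _update_ports_for_instance() re-raises it through oslo_utils.excutils.save_and_reraise_exception() (the force_reraise() / raise self.value frames). Below is a minimal, self-contained sketch of that pattern; it is not Nova's actual code, and the binding:vif_type == 'binding_failed' check and the clean-up print are assumptions added for illustration only.

    from oslo_utils import excutils


    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed (illustration only)."""
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs for "
                "more information." % port_id)


    def ensure_no_port_binding_failure(port):
        # Assumption: a failed binding shows up on the Neutron port as
        # binding:vif_type == 'binding_failed'.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])


    def update_ports_for_instance(ports):
        for port in ports:
            try:
                ensure_no_port_binding_failure(port)
            except Exception:
                # save_and_reraise_exception() runs the block and then
                # re-raises the original exception -- the force_reraise() /
                # raise self.value frames visible in the tracebacks above.
                with excutils.save_and_reraise_exception():
                    print("cleaning up port %s" % port['id'])  # hypothetical clean-up


    try:
        update_ports_for_instance(
            [{'id': '6421eacc-4575-4a31-a454-a75f25e03132',
              'binding:vif_type': 'binding_failed'}])
    except PortBindingFailed as exc:
        print(exc)

Running the sketch prints the clean-up message followed by the same "Binding failed for port ..." text that appears in the records above; in the real service the re-raised exception is what aborts the spawn and triggers the terminate/deallocate sequence logged below.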
[ 712.250720] env[62096]: ERROR nova.compute.manager [ 712.250720] env[62096]: Traceback (most recent call last): [ 712.250720] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 712.250720] env[62096]: listener.cb(fileno) [ 712.250720] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 712.250720] env[62096]: result = function(*args, **kwargs) [ 712.250720] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 712.250720] env[62096]: return func(*args, **kwargs) [ 712.250720] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 712.250720] env[62096]: raise e [ 712.250720] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 712.250720] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 712.250720] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 712.250720] env[62096]: created_port_ids = self._update_ports_for_instance( [ 712.250720] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 712.250720] env[62096]: with excutils.save_and_reraise_exception(): [ 712.250720] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 712.250720] env[62096]: self.force_reraise() [ 712.250720] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 712.250720] env[62096]: raise self.value [ 712.250720] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 712.250720] env[62096]: updated_port = self._update_port( [ 712.250720] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 712.250720] env[62096]: _ensure_no_port_binding_failure(port) [ 712.250720] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 712.250720] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 712.251708] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 04372f13-66f6-4584-8b4a-3d7edb00fb9a, please check neutron logs for more information. [ 712.251708] env[62096]: Removing descriptor: 16 [ 712.251708] env[62096]: ERROR nova.compute.manager [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 04372f13-66f6-4584-8b4a-3d7edb00fb9a, please check neutron logs for more information. 
[ 712.251708] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] Traceback (most recent call last): [ 712.251708] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 712.251708] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] yield resources [ 712.251708] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 712.251708] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] self.driver.spawn(context, instance, image_meta, [ 712.251708] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 712.251708] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] self._vmops.spawn(context, instance, image_meta, injected_files, [ 712.251708] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 712.251708] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] vm_ref = self.build_virtual_machine(instance, [ 712.252228] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 712.252228] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] vif_infos = vmwarevif.get_vif_info(self._session, [ 712.252228] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 712.252228] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] for vif in network_info: [ 712.252228] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 712.252228] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] return self._sync_wrapper(fn, *args, **kwargs) [ 712.252228] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 712.252228] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] self.wait() [ 712.252228] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 712.252228] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] self[:] = self._gt.wait() [ 712.252228] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 712.252228] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] return self._exit_event.wait() [ 712.252228] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 712.252691] env[62096]: ERROR 
nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] result = hub.switch() [ 712.252691] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 712.252691] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] return self.greenlet.switch() [ 712.252691] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 712.252691] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] result = function(*args, **kwargs) [ 712.252691] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 712.252691] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] return func(*args, **kwargs) [ 712.252691] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 712.252691] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] raise e [ 712.252691] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 712.252691] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] nwinfo = self.network_api.allocate_for_instance( [ 712.252691] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 712.252691] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] created_port_ids = self._update_ports_for_instance( [ 712.253106] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 712.253106] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] with excutils.save_and_reraise_exception(): [ 712.253106] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 712.253106] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] self.force_reraise() [ 712.253106] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 712.253106] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] raise self.value [ 712.253106] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 712.253106] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] updated_port = self._update_port( [ 712.253106] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 712.253106] 
env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] _ensure_no_port_binding_failure(port) [ 712.253106] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 712.253106] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] raise exception.PortBindingFailed(port_id=port['id']) [ 712.253494] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] nova.exception.PortBindingFailed: Binding failed for port 04372f13-66f6-4584-8b4a-3d7edb00fb9a, please check neutron logs for more information. [ 712.253494] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] [ 712.253494] env[62096]: INFO nova.compute.manager [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] Terminating instance [ 712.254003] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Acquiring lock "refresh_cache-3480c79b-58e4-4759-acd4-b2f45f22da54" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 712.254168] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Acquired lock "refresh_cache-3480c79b-58e4-4759-acd4-b2f45f22da54" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 712.254329] env[62096]: DEBUG nova.network.neutron [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 712.254794] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Expecting reply to msg 944a37908fc44e89899589a0ccb400d8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 712.261182] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 944a37908fc44e89899589a0ccb400d8 [ 712.586721] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Releasing lock "refresh_cache-374ca884-8e77-4568-8667-e124e6df4c75" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 712.586976] env[62096]: DEBUG nova.compute.manager [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 712.587164] env[62096]: DEBUG nova.compute.manager [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 374ca884-8e77-4568-8667-e124e6df4c75] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 712.587331] env[62096]: DEBUG nova.network.neutron [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 374ca884-8e77-4568-8667-e124e6df4c75] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 712.608840] env[62096]: DEBUG nova.network.neutron [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 374ca884-8e77-4568-8667-e124e6df4c75] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 712.609421] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg 8c0695fe5d3e42d1ac6e9dd9c5fd979f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 712.617429] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8c0695fe5d3e42d1ac6e9dd9c5fd979f [ 712.682479] env[62096]: INFO nova.compute.manager [-] [instance: 7d62e302-8080-4699-b88d-cb29031e6707] Took 1.20 seconds to deallocate network for instance. 
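The lockutils records surrounding the claim abort (Lock "compute_resources" acquired/released by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim", with waited/held timings) come from oslo.concurrency's synchronized helper. A rough sketch of that usage follows, with a hypothetical claim-abort body standing in for the real ResourceTracker method:

    from oslo_concurrency import lockutils

    # lockutils.synchronized() serializes callers on a named lock and logs
    # "acquired by ... waited N s" / "released by ... held N s" DEBUG lines
    # of the kind seen in this log.
    @lockutils.synchronized('compute_resources')
    def abort_instance_claim(instance_uuid):
        # Hypothetical body: return the instance's claimed resources.
        print('aborting claim for %s' % instance_uuid)


    abort_instance_claim('7d62e302-8080-4699-b88d-cb29031e6707')

The long "waited" times reported here (for example 15.455s earlier in this section) simply mean other claim/abort operations held the same named lock first.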
[ 712.684842] env[62096]: DEBUG nova.compute.claims [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] [instance: 7d62e302-8080-4699-b88d-cb29031e6707] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 712.685058] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 712.704266] env[62096]: DEBUG nova.scheduler.client.report [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 712.706894] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg 1b3ee35c088946d6bf6d5d04469f23a6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 712.721485] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1b3ee35c088946d6bf6d5d04469f23a6 [ 712.770783] env[62096]: DEBUG nova.network.neutron [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 712.842815] env[62096]: DEBUG nova.network.neutron [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 712.842815] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Expecting reply to msg 132250d3dba74ff69c59d347779a5ba2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 712.852612] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 132250d3dba74ff69c59d347779a5ba2 [ 713.112054] env[62096]: DEBUG nova.network.neutron [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 374ca884-8e77-4568-8667-e124e6df4c75] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 713.112618] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg 8feb409d189049a2aa5bd4c1982dbb00 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 713.121132] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8feb409d189049a2aa5bd4c1982dbb00 [ 713.209700] env[62096]: DEBUG oslo_concurrency.lockutils [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.057s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 713.210314] env[62096]: ERROR nova.compute.manager [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port da3e6abf-b3f7-4739-b243-642f489448db, please check neutron logs for more information. 
[ 713.210314] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] Traceback (most recent call last): [ 713.210314] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 713.210314] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] self.driver.spawn(context, instance, image_meta, [ 713.210314] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 713.210314] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] self._vmops.spawn(context, instance, image_meta, injected_files, [ 713.210314] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 713.210314] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] vm_ref = self.build_virtual_machine(instance, [ 713.210314] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 713.210314] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] vif_infos = vmwarevif.get_vif_info(self._session, [ 713.210314] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 713.210683] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] for vif in network_info: [ 713.210683] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 713.210683] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] return self._sync_wrapper(fn, *args, **kwargs) [ 713.210683] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 713.210683] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] self.wait() [ 713.210683] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 713.210683] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] self[:] = self._gt.wait() [ 713.210683] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 713.210683] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] return self._exit_event.wait() [ 713.210683] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 713.210683] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] current.throw(*self._exc) [ 713.210683] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
713.210683] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] result = function(*args, **kwargs) [ 713.211042] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 713.211042] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] return func(*args, **kwargs) [ 713.211042] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 713.211042] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] raise e [ 713.211042] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 713.211042] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] nwinfo = self.network_api.allocate_for_instance( [ 713.211042] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 713.211042] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] created_port_ids = self._update_ports_for_instance( [ 713.211042] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 713.211042] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] with excutils.save_and_reraise_exception(): [ 713.211042] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 713.211042] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] self.force_reraise() [ 713.211042] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 713.211447] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] raise self.value [ 713.211447] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 713.211447] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] updated_port = self._update_port( [ 713.211447] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 713.211447] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] _ensure_no_port_binding_failure(port) [ 713.211447] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 713.211447] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] raise exception.PortBindingFailed(port_id=port['id']) [ 713.211447] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] nova.exception.PortBindingFailed: Binding failed for 
port da3e6abf-b3f7-4739-b243-642f489448db, please check neutron logs for more information. [ 713.211447] env[62096]: ERROR nova.compute.manager [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] [ 713.211447] env[62096]: DEBUG nova.compute.utils [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] Binding failed for port da3e6abf-b3f7-4739-b243-642f489448db, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 713.212249] env[62096]: DEBUG oslo_concurrency.lockutils [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.755s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 713.213628] env[62096]: INFO nova.compute.claims [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 713.215239] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Expecting reply to msg 4c809d3ded5c406db562638308750fee in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 713.216428] env[62096]: DEBUG nova.compute.manager [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] Build of instance 575e00ca-0f97-42c5-9e4d-706c21453210 was re-scheduled: Binding failed for port da3e6abf-b3f7-4739-b243-642f489448db, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 713.216845] env[62096]: DEBUG nova.compute.manager [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 713.217070] env[62096]: DEBUG oslo_concurrency.lockutils [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Acquiring lock "refresh_cache-575e00ca-0f97-42c5-9e4d-706c21453210" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 713.217212] env[62096]: DEBUG oslo_concurrency.lockutils [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Acquired lock "refresh_cache-575e00ca-0f97-42c5-9e4d-706c21453210" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 713.217358] env[62096]: DEBUG nova.network.neutron [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 713.217699] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg 5a3097f926364b92b0c3783e1e069f6a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 713.243175] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5a3097f926364b92b0c3783e1e069f6a [ 713.269319] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4c809d3ded5c406db562638308750fee [ 713.344607] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Releasing lock "refresh_cache-3480c79b-58e4-4759-acd4-b2f45f22da54" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 713.345037] env[62096]: DEBUG nova.compute.manager [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 713.345227] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 713.345530] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4e482d7d-6fd9-4a4c-91f0-f799e95bafbf {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.354207] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4efd1a7-fd1b-4262-a0ef-03f8ec3e4d69 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.380527] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3480c79b-58e4-4759-acd4-b2f45f22da54 could not be found. [ 713.380527] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 713.380527] env[62096]: INFO nova.compute.manager [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] Took 0.03 seconds to destroy the instance on the hypervisor. [ 713.380527] env[62096]: DEBUG oslo.service.loopingcall [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 713.380527] env[62096]: DEBUG nova.compute.manager [-] [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 713.380527] env[62096]: DEBUG nova.network.neutron [-] [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 713.394657] env[62096]: DEBUG nova.network.neutron [-] [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 713.395312] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 4c093259857149a497086ca2dac8fab3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 713.404211] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4c093259857149a497086ca2dac8fab3 [ 713.452424] env[62096]: DEBUG nova.compute.manager [req-59084d2f-9886-422e-b7d4-5aa9363d3b82 req-26c102de-7a88-4f83-bb7a-a48ec968a98b service nova] [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] Received event network-changed-04372f13-66f6-4584-8b4a-3d7edb00fb9a {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 713.452615] env[62096]: DEBUG nova.compute.manager [req-59084d2f-9886-422e-b7d4-5aa9363d3b82 req-26c102de-7a88-4f83-bb7a-a48ec968a98b service nova] [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] Refreshing instance network info cache due to event network-changed-04372f13-66f6-4584-8b4a-3d7edb00fb9a. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 713.452833] env[62096]: DEBUG oslo_concurrency.lockutils [req-59084d2f-9886-422e-b7d4-5aa9363d3b82 req-26c102de-7a88-4f83-bb7a-a48ec968a98b service nova] Acquiring lock "refresh_cache-3480c79b-58e4-4759-acd4-b2f45f22da54" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 713.452977] env[62096]: DEBUG oslo_concurrency.lockutils [req-59084d2f-9886-422e-b7d4-5aa9363d3b82 req-26c102de-7a88-4f83-bb7a-a48ec968a98b service nova] Acquired lock "refresh_cache-3480c79b-58e4-4759-acd4-b2f45f22da54" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 713.453145] env[62096]: DEBUG nova.network.neutron [req-59084d2f-9886-422e-b7d4-5aa9363d3b82 req-26c102de-7a88-4f83-bb7a-a48ec968a98b service nova] [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] Refreshing network info cache for port 04372f13-66f6-4584-8b4a-3d7edb00fb9a {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 713.453535] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-59084d2f-9886-422e-b7d4-5aa9363d3b82 req-26c102de-7a88-4f83-bb7a-a48ec968a98b service nova] Expecting reply to msg e14b398d39d942e49f739c85cfed2a1c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 713.462149] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e14b398d39d942e49f739c85cfed2a1c [ 713.615020] env[62096]: INFO nova.compute.manager [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 374ca884-8e77-4568-8667-e124e6df4c75] Took 1.03 seconds to deallocate network for instance. 
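[editor's note] Both build failures in this section end with nova.exception.PortBindingFailed raised from _ensure_no_port_binding_failure in nova/network/neutron.py (see the tracebacks at 713.210xxx above and 717.724xxx below). A rough sketch of that kind of guard follows; detecting the failure via the port's binding:vif_type attribute is an assumption for the sketch, since the traceback only shows that the exception is raised with the port id.

    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed in this sketch."""
        def __init__(self, port_id):
            super().__init__('Binding failed for port %s, please check neutron '
                             'logs for more information.' % port_id)

    def ensure_no_port_binding_failure(port):
        # Assumption: Neutron reports a failed binding on the port itself
        # (binding:vif_type == 'binding_failed'); the log only shows that
        # PortBindingFailed(port_id=port['id']) is raised when binding fails.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    # Example with a placeholder port dict:
    # ensure_no_port_binding_failure(
    #     {'id': 'example-port-id', 'binding:vif_type': 'binding_failed'})

When _build_and_run_instance catches this exception, the build is re-scheduled ("Build of instance ... was re-scheduled: Binding failed for port ..."), the resource tracker claim is aborted, and the placement allocations are deleted, matching the abort_instance_claim and "Deleted allocations for instance" entries in this section.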
[ 713.616859] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg 1e503da6bf3f4bc58e2693e615202a1b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 713.650414] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1e503da6bf3f4bc58e2693e615202a1b [ 713.720157] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Expecting reply to msg cf6e4bc6d1734268a17a4542ec601e8f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 713.731684] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cf6e4bc6d1734268a17a4542ec601e8f [ 713.736830] env[62096]: DEBUG nova.network.neutron [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 713.844205] env[62096]: DEBUG nova.network.neutron [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 713.844812] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg 4baaf4cfebac47c6844aa4b8e4b465c0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 713.853720] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4baaf4cfebac47c6844aa4b8e4b465c0 [ 713.897391] env[62096]: DEBUG nova.network.neutron [-] [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 713.897942] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg fb89407cc7b64bc995e39071fd9bd605 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 713.907924] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fb89407cc7b64bc995e39071fd9bd605 [ 713.978589] env[62096]: DEBUG nova.network.neutron [req-59084d2f-9886-422e-b7d4-5aa9363d3b82 req-26c102de-7a88-4f83-bb7a-a48ec968a98b service nova] [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 714.082369] env[62096]: DEBUG nova.network.neutron [req-59084d2f-9886-422e-b7d4-5aa9363d3b82 req-26c102de-7a88-4f83-bb7a-a48ec968a98b service nova] [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 714.082901] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-59084d2f-9886-422e-b7d4-5aa9363d3b82 req-26c102de-7a88-4f83-bb7a-a48ec968a98b service nova] Expecting reply to msg 775f43adcee74084b6e6e45849469c4b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 714.091316] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 775f43adcee74084b6e6e45849469c4b [ 714.121703] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg 91ffb7141d9342f7ab15cd7770a7e9c5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 714.155969] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 91ffb7141d9342f7ab15cd7770a7e9c5 [ 714.347667] env[62096]: DEBUG oslo_concurrency.lockutils [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Releasing lock "refresh_cache-575e00ca-0f97-42c5-9e4d-706c21453210" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 714.347667] env[62096]: DEBUG nova.compute.manager [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 714.347970] env[62096]: DEBUG nova.compute.manager [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 714.347970] env[62096]: DEBUG nova.network.neutron [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 714.370566] env[62096]: DEBUG nova.network.neutron [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 714.371134] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg 42f149681fc7403a9301501bf9062f04 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 714.380134] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 42f149681fc7403a9301501bf9062f04 [ 714.400206] env[62096]: INFO nova.compute.manager [-] [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] Took 1.02 seconds to deallocate network for instance. [ 714.402407] env[62096]: DEBUG nova.compute.claims [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 714.402583] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 714.586192] env[62096]: DEBUG oslo_concurrency.lockutils [req-59084d2f-9886-422e-b7d4-5aa9363d3b82 req-26c102de-7a88-4f83-bb7a-a48ec968a98b service nova] Releasing lock "refresh_cache-3480c79b-58e4-4759-acd4-b2f45f22da54" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 714.586192] env[62096]: DEBUG nova.compute.manager [req-59084d2f-9886-422e-b7d4-5aa9363d3b82 req-26c102de-7a88-4f83-bb7a-a48ec968a98b service nova] [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] Received event network-vif-deleted-04372f13-66f6-4584-8b4a-3d7edb00fb9a {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 714.650265] env[62096]: INFO nova.scheduler.client.report [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Deleted allocations for instance 374ca884-8e77-4568-8667-e124e6df4c75 [ 714.661175] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg c7147518c4ea480f9d891a4b689c62d2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 714.675061] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e30e3ce8-4da8-4a46-b790-4a5df5cc75d3 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.684495] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b29bde59-d5c2-4fdb-8625-93feb291b40f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.720136] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c7147518c4ea480f9d891a4b689c62d2 [ 714.721092] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-b800d270-c7af-41ea-aeca-cafe1b33df4a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.729582] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7117117f-18e2-4fe8-9980-bd4f9ab1170d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.744583] env[62096]: DEBUG nova.compute.provider_tree [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 714.745117] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Expecting reply to msg 333a7b934b7f425a83616e5548a6df6c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 714.752889] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 333a7b934b7f425a83616e5548a6df6c [ 714.879108] env[62096]: DEBUG nova.network.neutron [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 714.879665] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg 3b581c86fa314d73a899f09e5db6095d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 714.891425] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3b581c86fa314d73a899f09e5db6095d [ 715.163110] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5ad75242-2efb-4f8b-a5d0-817319fe309a tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Lock "374ca884-8e77-4568-8667-e124e6df4c75" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 131.235s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 715.163773] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Expecting reply to msg 2a2590ead603484e9fab63f9c3384ea5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 715.179042] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2a2590ead603484e9fab63f9c3384ea5 [ 715.247809] env[62096]: DEBUG nova.scheduler.client.report [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 715.250335] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Expecting reply to msg 6f43003aee7543bea2e2b77fc1846098 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 715.265727] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6f43003aee7543bea2e2b77fc1846098 [ 715.285622] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Acquiring lock "b54dd1f8-2e8d-446d-9145-d034664b7069" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 715.285622] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Lock "b54dd1f8-2e8d-446d-9145-d034664b7069" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 715.386369] env[62096]: INFO nova.compute.manager [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 575e00ca-0f97-42c5-9e4d-706c21453210] Took 1.04 seconds to deallocate network for instance. [ 715.386369] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg d00c6c50687d45239f60ae2ad227ecc7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 715.423023] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d00c6c50687d45239f60ae2ad227ecc7 [ 715.666824] env[62096]: DEBUG nova.compute.manager [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] Starting instance... 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 715.668291] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Expecting reply to msg ddc5bca8b77e4dc9a115b3e7f9d6ff3c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 715.700775] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ddc5bca8b77e4dc9a115b3e7f9d6ff3c [ 715.753054] env[62096]: DEBUG oslo_concurrency.lockutils [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.541s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 715.753575] env[62096]: DEBUG nova.compute.manager [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] Start building networks asynchronously for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 715.755643] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Expecting reply to msg cba905d1654d48249dcc8d24f49ed8d2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 715.756826] env[62096]: DEBUG oslo_concurrency.lockutils [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.990s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 715.759587] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Expecting reply to msg 7ebb923a1416468eb5de5cfb05af3010 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 715.799976] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ebb923a1416468eb5de5cfb05af3010 [ 715.799976] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cba905d1654d48249dcc8d24f49ed8d2 [ 715.890911] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg c5833d30faa34302883909a6af5872f2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 715.925700] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c5833d30faa34302883909a6af5872f2 [ 716.190634] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 716.263227] env[62096]: DEBUG nova.compute.utils [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 716.264131] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Expecting reply to msg 3c4cbbb3dc4e4be6aea43d499c15904a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 716.268313] env[62096]: DEBUG nova.compute.manager [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 716.268546] env[62096]: DEBUG nova.network.neutron [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 716.273968] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c4cbbb3dc4e4be6aea43d499c15904a [ 716.354621] env[62096]: DEBUG nova.policy [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8730accd7ff6480084e473f3906b107f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '76c55bb53fdd4ecd90a0de0a30899a71', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 716.413686] env[62096]: INFO nova.scheduler.client.report [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Deleted allocations for instance 575e00ca-0f97-42c5-9e4d-706c21453210 [ 716.420530] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg 7861dbb5ad6f479baa7ae3f05af41a91 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 716.435742] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7861dbb5ad6f479baa7ae3f05af41a91 [ 716.658045] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb6882d8-7ef5-485c-975f-a398eaf650d2 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.664579] 
env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30ea607f-e4eb-48a2-8175-807501f45799 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.695063] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d969602d-54fc-43d7-99f4-ab423ab4efdf {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.702505] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2c8cdce-441a-467d-9c45-5e2e085a449b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.715149] env[62096]: DEBUG nova.compute.provider_tree [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 716.715690] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Expecting reply to msg 07e42d63686a4af589ba61d2239373b1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 716.724039] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 07e42d63686a4af589ba61d2239373b1 [ 716.770407] env[62096]: DEBUG nova.compute.manager [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 716.771903] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Expecting reply to msg fa0f0266eed245c9b04a9ee31f921827 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 716.795153] env[62096]: DEBUG nova.network.neutron [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] Successfully created port: 3d1a8a96-d849-473c-8572-e26b0c89c85a {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 716.819919] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fa0f0266eed245c9b04a9ee31f921827 [ 716.925272] env[62096]: DEBUG oslo_concurrency.lockutils [None req-7f410855-d894-4223-982d-162ddb3e3fc7 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Lock "575e00ca-0f97-42c5-9e4d-706c21453210" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 132.106s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 716.925930] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Expecting reply to msg 5ccbcab5426f4ef085056be7c385aa97 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 716.936441] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ccbcab5426f4ef085056be7c385aa97 [ 717.218866] env[62096]: DEBUG nova.scheduler.client.report [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 717.221336] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Expecting reply to msg d885001b48bb4ccd9747080adad520dd in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 717.235352] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d885001b48bb4ccd9747080adad520dd [ 717.276216] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Expecting reply to msg 167d39c27ce7404d87eb6a08f31b0003 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 717.309732] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received 
RPC response for msg 167d39c27ce7404d87eb6a08f31b0003 [ 717.428717] env[62096]: DEBUG nova.compute.manager [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 321ab95b-6221-4bab-b442-a90926098dae] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 717.430458] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Expecting reply to msg 13cdde52773d404db397b62c4bb82cb5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 717.464149] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 13cdde52773d404db397b62c4bb82cb5 [ 717.724073] env[62096]: DEBUG oslo_concurrency.lockutils [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.967s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 717.724813] env[62096]: ERROR nova.compute.manager [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port cf058cb6-01d6-407d-b545-720a102be194, please check neutron logs for more information. [ 717.724813] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] Traceback (most recent call last): [ 717.724813] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 717.724813] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] self.driver.spawn(context, instance, image_meta, [ 717.724813] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 717.724813] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 717.724813] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 717.724813] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] vm_ref = self.build_virtual_machine(instance, [ 717.724813] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 717.724813] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] vif_infos = vmwarevif.get_vif_info(self._session, [ 717.724813] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 717.725195] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] for vif in network_info: [ 717.725195] 
env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 717.725195] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] return self._sync_wrapper(fn, *args, **kwargs) [ 717.725195] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 717.725195] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] self.wait() [ 717.725195] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 717.725195] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] self[:] = self._gt.wait() [ 717.725195] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 717.725195] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] return self._exit_event.wait() [ 717.725195] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 717.725195] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] current.throw(*self._exc) [ 717.725195] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 717.725195] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] result = function(*args, **kwargs) [ 717.725559] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 717.725559] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] return func(*args, **kwargs) [ 717.725559] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 717.725559] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] raise e [ 717.725559] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 717.725559] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] nwinfo = self.network_api.allocate_for_instance( [ 717.725559] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 717.725559] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] created_port_ids = self._update_ports_for_instance( [ 717.725559] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 717.725559] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] with excutils.save_and_reraise_exception(): [ 717.725559] env[62096]: ERROR nova.compute.manager 
[instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 717.725559] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] self.force_reraise() [ 717.725559] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 717.725930] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] raise self.value [ 717.725930] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 717.725930] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] updated_port = self._update_port( [ 717.725930] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 717.725930] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] _ensure_no_port_binding_failure(port) [ 717.725930] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 717.725930] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] raise exception.PortBindingFailed(port_id=port['id']) [ 717.725930] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] nova.exception.PortBindingFailed: Binding failed for port cf058cb6-01d6-407d-b545-720a102be194, please check neutron logs for more information. [ 717.725930] env[62096]: ERROR nova.compute.manager [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] [ 717.725930] env[62096]: DEBUG nova.compute.utils [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] Binding failed for port cf058cb6-01d6-407d-b545-720a102be194, please check neutron logs for more information. 
{{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 717.726761] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.713s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 717.728603] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Expecting reply to msg 05a911ea3a224452b78556c504a988c6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 717.730572] env[62096]: DEBUG nova.compute.manager [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] Build of instance 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9 was re-scheduled: Binding failed for port cf058cb6-01d6-407d-b545-720a102be194, please check neutron logs for more information. {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 717.731009] env[62096]: DEBUG nova.compute.manager [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 717.731271] env[62096]: DEBUG oslo_concurrency.lockutils [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Acquiring lock "refresh_cache-1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 717.731450] env[62096]: DEBUG oslo_concurrency.lockutils [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Acquired lock "refresh_cache-1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 717.731643] env[62096]: DEBUG nova.network.neutron [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 717.732073] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Expecting reply to msg 866c2eed9e094e3b9c61ca2b60d4b76c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 717.739270] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 866c2eed9e094e3b9c61ca2b60d4b76c [ 717.775752] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
05a911ea3a224452b78556c504a988c6 [ 717.780254] env[62096]: DEBUG nova.compute.manager [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] Start spawning the instance on the hypervisor. {{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 717.804680] env[62096]: DEBUG nova.virt.hardware [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 717.804986] env[62096]: DEBUG nova.virt.hardware [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 717.805153] env[62096]: DEBUG nova.virt.hardware [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 717.805334] env[62096]: DEBUG nova.virt.hardware [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 717.805478] env[62096]: DEBUG nova.virt.hardware [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 717.805647] env[62096]: DEBUG nova.virt.hardware [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 717.805860] env[62096]: DEBUG nova.virt.hardware [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 717.806083] env[62096]: DEBUG nova.virt.hardware [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 717.806176] env[62096]: DEBUG nova.virt.hardware [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 717.806325] env[62096]: DEBUG nova.virt.hardware [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 717.806557] env[62096]: DEBUG nova.virt.hardware [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 717.809665] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eb8f99c-1b27-4d8d-a20a-f3665b7a5744 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.819166] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a605a925-f7a8-4eec-a582-0bd55b1abe43 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.953080] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 718.291513] env[62096]: DEBUG nova.network.neutron [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 718.346800] env[62096]: DEBUG nova.compute.manager [req-ff0d8fcb-e643-4e17-8d20-3a2ae6645a3d req-10d30044-5301-45e9-a7e0-66ed025d3c32 service nova] [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] Received event network-changed-3d1a8a96-d849-473c-8572-e26b0c89c85a {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 718.347247] env[62096]: DEBUG nova.compute.manager [req-ff0d8fcb-e643-4e17-8d20-3a2ae6645a3d req-10d30044-5301-45e9-a7e0-66ed025d3c32 service nova] [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] Refreshing instance network info cache due to event network-changed-3d1a8a96-d849-473c-8572-e26b0c89c85a. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 718.347247] env[62096]: DEBUG oslo_concurrency.lockutils [req-ff0d8fcb-e643-4e17-8d20-3a2ae6645a3d req-10d30044-5301-45e9-a7e0-66ed025d3c32 service nova] Acquiring lock "refresh_cache-4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 718.347375] env[62096]: DEBUG oslo_concurrency.lockutils [req-ff0d8fcb-e643-4e17-8d20-3a2ae6645a3d req-10d30044-5301-45e9-a7e0-66ed025d3c32 service nova] Acquired lock "refresh_cache-4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.347474] env[62096]: DEBUG nova.network.neutron [req-ff0d8fcb-e643-4e17-8d20-3a2ae6645a3d req-10d30044-5301-45e9-a7e0-66ed025d3c32 service nova] [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] Refreshing network info cache for port 3d1a8a96-d849-473c-8572-e26b0c89c85a {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 718.348105] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-ff0d8fcb-e643-4e17-8d20-3a2ae6645a3d req-10d30044-5301-45e9-a7e0-66ed025d3c32 service nova] Expecting reply to msg aedf010f89a6414e88e242b752fef9e9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 718.354891] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aedf010f89a6414e88e242b752fef9e9 [ 718.466603] env[62096]: DEBUG nova.network.neutron [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 718.467100] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Expecting reply to msg 79abb1c5bfdc43ae888f8acafaef87ed in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 718.476594] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 79abb1c5bfdc43ae888f8acafaef87ed [ 718.611273] env[62096]: ERROR nova.compute.manager [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 3d1a8a96-d849-473c-8572-e26b0c89c85a, please check neutron logs for more information. 
[ 718.611273] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 718.611273] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 718.611273] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 718.611273] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 718.611273] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 718.611273] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 718.611273] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 718.611273] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 718.611273] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 718.611273] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 718.611273] env[62096]: ERROR nova.compute.manager raise self.value [ 718.611273] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 718.611273] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 718.611273] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 718.611273] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 718.611892] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 718.611892] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 718.611892] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 3d1a8a96-d849-473c-8572-e26b0c89c85a, please check neutron logs for more information. 
[ 718.611892] env[62096]: ERROR nova.compute.manager [ 718.611892] env[62096]: Traceback (most recent call last): [ 718.611892] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 718.611892] env[62096]: listener.cb(fileno) [ 718.611892] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 718.611892] env[62096]: result = function(*args, **kwargs) [ 718.611892] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 718.611892] env[62096]: return func(*args, **kwargs) [ 718.611892] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 718.611892] env[62096]: raise e [ 718.611892] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 718.611892] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 718.611892] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 718.611892] env[62096]: created_port_ids = self._update_ports_for_instance( [ 718.611892] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 718.611892] env[62096]: with excutils.save_and_reraise_exception(): [ 718.611892] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 718.611892] env[62096]: self.force_reraise() [ 718.611892] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 718.611892] env[62096]: raise self.value [ 718.611892] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 718.611892] env[62096]: updated_port = self._update_port( [ 718.611892] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 718.611892] env[62096]: _ensure_no_port_binding_failure(port) [ 718.611892] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 718.611892] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 718.612862] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 3d1a8a96-d849-473c-8572-e26b0c89c85a, please check neutron logs for more information. [ 718.612862] env[62096]: Removing descriptor: 16 [ 718.612862] env[62096]: ERROR nova.compute.manager [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 3d1a8a96-d849-473c-8572-e26b0c89c85a, please check neutron logs for more information. 
[ 718.612862] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] Traceback (most recent call last): [ 718.612862] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 718.612862] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] yield resources [ 718.612862] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 718.612862] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] self.driver.spawn(context, instance, image_meta, [ 718.612862] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 718.612862] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] self._vmops.spawn(context, instance, image_meta, injected_files, [ 718.612862] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 718.612862] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] vm_ref = self.build_virtual_machine(instance, [ 718.613292] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 718.613292] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] vif_infos = vmwarevif.get_vif_info(self._session, [ 718.613292] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 718.613292] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] for vif in network_info: [ 718.613292] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 718.613292] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] return self._sync_wrapper(fn, *args, **kwargs) [ 718.613292] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 718.613292] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] self.wait() [ 718.613292] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 718.613292] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] self[:] = self._gt.wait() [ 718.613292] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 718.613292] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] return self._exit_event.wait() [ 718.613292] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 718.613720] env[62096]: ERROR 
nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] result = hub.switch() [ 718.613720] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 718.613720] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] return self.greenlet.switch() [ 718.613720] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 718.613720] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] result = function(*args, **kwargs) [ 718.613720] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 718.613720] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] return func(*args, **kwargs) [ 718.613720] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 718.613720] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] raise e [ 718.613720] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 718.613720] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] nwinfo = self.network_api.allocate_for_instance( [ 718.613720] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 718.613720] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] created_port_ids = self._update_ports_for_instance( [ 718.614166] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 718.614166] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] with excutils.save_and_reraise_exception(): [ 718.614166] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 718.614166] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] self.force_reraise() [ 718.614166] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 718.614166] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] raise self.value [ 718.614166] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 718.614166] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] updated_port = self._update_port( [ 718.614166] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 718.614166] 
env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] _ensure_no_port_binding_failure(port) [ 718.614166] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 718.614166] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] raise exception.PortBindingFailed(port_id=port['id']) [ 718.614565] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] nova.exception.PortBindingFailed: Binding failed for port 3d1a8a96-d849-473c-8572-e26b0c89c85a, please check neutron logs for more information. [ 718.614565] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] [ 718.614565] env[62096]: INFO nova.compute.manager [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] Terminating instance [ 718.616115] env[62096]: DEBUG oslo_concurrency.lockutils [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Acquiring lock "refresh_cache-4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 718.634296] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99411c15-8254-4ff2-a9dc-a01fce54e154 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.642542] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11cedd8d-77c0-4d28-af17-a3ea591b6c23 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.672260] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8476a65b-9d29-4f75-810d-aaefb93496db {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.679595] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57f1df8a-52ed-46de-b284-723845145bb0 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.692563] env[62096]: DEBUG nova.compute.provider_tree [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 718.693063] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Expecting reply to msg 022fe58857514437a098b330612d58fa in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 718.700510] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 022fe58857514437a098b330612d58fa [ 718.871459] env[62096]: DEBUG 
nova.network.neutron [req-ff0d8fcb-e643-4e17-8d20-3a2ae6645a3d req-10d30044-5301-45e9-a7e0-66ed025d3c32 service nova] [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 718.970349] env[62096]: DEBUG oslo_concurrency.lockutils [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Releasing lock "refresh_cache-1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 718.970349] env[62096]: DEBUG nova.compute.manager [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 718.970349] env[62096]: DEBUG nova.compute.manager [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 718.970349] env[62096]: DEBUG nova.network.neutron [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 718.997482] env[62096]: DEBUG nova.network.neutron [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 718.998083] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Expecting reply to msg 81f0dd3c370548e4a935ded900129a22 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 719.008617] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 81f0dd3c370548e4a935ded900129a22 [ 719.008691] env[62096]: DEBUG nova.network.neutron [req-ff0d8fcb-e643-4e17-8d20-3a2ae6645a3d req-10d30044-5301-45e9-a7e0-66ed025d3c32 service nova] [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 719.009165] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-ff0d8fcb-e643-4e17-8d20-3a2ae6645a3d req-10d30044-5301-45e9-a7e0-66ed025d3c32 service nova] Expecting reply to msg a5ee0fbc8c9d481c86a732afdb4ed9c2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 719.016975] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a5ee0fbc8c9d481c86a732afdb4ed9c2 [ 719.195403] env[62096]: DEBUG nova.scheduler.client.report [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 719.198649] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Expecting reply to msg 096630ac65b247b3acfa26a8558e04a2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 719.231651] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 096630ac65b247b3acfa26a8558e04a2 [ 719.325769] env[62096]: DEBUG oslo_concurrency.lockutils [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Acquiring lock "d5a6c627-bbab-49d4-a3bd-cb5b15264b18" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 719.326003] env[62096]: DEBUG oslo_concurrency.lockutils [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Lock "d5a6c627-bbab-49d4-a3bd-cb5b15264b18" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 719.500717] env[62096]: DEBUG 
nova.network.neutron [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 719.501391] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Expecting reply to msg 7f57003e2f6d4e5fa46c652fdc450ede in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 719.509888] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7f57003e2f6d4e5fa46c652fdc450ede [ 719.510436] env[62096]: DEBUG oslo_concurrency.lockutils [req-ff0d8fcb-e643-4e17-8d20-3a2ae6645a3d req-10d30044-5301-45e9-a7e0-66ed025d3c32 service nova] Releasing lock "refresh_cache-4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 719.510936] env[62096]: DEBUG oslo_concurrency.lockutils [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Acquired lock "refresh_cache-4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 719.511020] env[62096]: DEBUG nova.network.neutron [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 719.511347] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Expecting reply to msg edb94c14d45546bfaea10169b8243dd4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 719.518344] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg edb94c14d45546bfaea10169b8243dd4 [ 719.701699] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.975s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 719.702366] env[62096]: ERROR nova.compute.manager [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 2d7f49b1-591c-4fbb-8ac2-6e9c7b079525, please check neutron logs for more information. 
[ 719.702366] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] Traceback (most recent call last): [ 719.702366] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 719.702366] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] self.driver.spawn(context, instance, image_meta, [ 719.702366] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 719.702366] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 719.702366] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 719.702366] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] vm_ref = self.build_virtual_machine(instance, [ 719.702366] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 719.702366] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] vif_infos = vmwarevif.get_vif_info(self._session, [ 719.702366] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 719.702981] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] for vif in network_info: [ 719.702981] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 719.702981] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] return self._sync_wrapper(fn, *args, **kwargs) [ 719.702981] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 719.702981] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] self.wait() [ 719.702981] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 719.702981] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] self[:] = self._gt.wait() [ 719.702981] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 719.702981] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] return self._exit_event.wait() [ 719.702981] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 719.702981] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] current.throw(*self._exc) [ 719.702981] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
719.702981] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] result = function(*args, **kwargs) [ 719.703603] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 719.703603] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] return func(*args, **kwargs) [ 719.703603] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 719.703603] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] raise e [ 719.703603] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 719.703603] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] nwinfo = self.network_api.allocate_for_instance( [ 719.703603] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 719.703603] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] created_port_ids = self._update_ports_for_instance( [ 719.703603] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 719.703603] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] with excutils.save_and_reraise_exception(): [ 719.703603] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 719.703603] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] self.force_reraise() [ 719.703603] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 719.704526] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] raise self.value [ 719.704526] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 719.704526] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] updated_port = self._update_port( [ 719.704526] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 719.704526] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] _ensure_no_port_binding_failure(port) [ 719.704526] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 719.704526] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] raise exception.PortBindingFailed(port_id=port['id']) [ 719.704526] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] nova.exception.PortBindingFailed: Binding failed for 
port 2d7f49b1-591c-4fbb-8ac2-6e9c7b079525, please check neutron logs for more information. [ 719.704526] env[62096]: ERROR nova.compute.manager [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] [ 719.704526] env[62096]: DEBUG nova.compute.utils [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] Binding failed for port 2d7f49b1-591c-4fbb-8ac2-6e9c7b079525, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 719.705074] env[62096]: DEBUG oslo_concurrency.lockutils [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.185s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 719.706242] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Expecting reply to msg f6a4bf105c2d456f810151aff093e467 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 719.707400] env[62096]: DEBUG nova.compute.manager [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] Build of instance 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4 was re-scheduled: Binding failed for port 2d7f49b1-591c-4fbb-8ac2-6e9c7b079525, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 719.707803] env[62096]: DEBUG nova.compute.manager [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 719.708045] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Acquiring lock "refresh_cache-0d89651a-8d34-428a-a9e7-d56ce9d3c5b4" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 719.708170] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Acquired lock "refresh_cache-0d89651a-8d34-428a-a9e7-d56ce9d3c5b4" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 719.708327] env[62096]: DEBUG nova.network.neutron [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 719.708676] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Expecting reply to msg d3bf1ad17497459f89a558c29adb764b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 719.723851] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d3bf1ad17497459f89a558c29adb764b [ 719.766769] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f6a4bf105c2d456f810151aff093e467 [ 720.003603] env[62096]: INFO nova.compute.manager [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9] Took 1.03 seconds to deallocate network for instance. [ 720.005673] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Expecting reply to msg 5e70217b84eb4a05be6ca2ae934e0eaf in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 720.032659] env[62096]: DEBUG nova.network.neutron [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 720.038604] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5e70217b84eb4a05be6ca2ae934e0eaf [ 720.147031] env[62096]: DEBUG nova.network.neutron [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 720.147517] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Expecting reply to msg 618beba939244e96ad2591f8ff682e8f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 720.162632] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 618beba939244e96ad2591f8ff682e8f [ 720.217489] env[62096]: DEBUG oslo_concurrency.lockutils [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Acquiring lock "4057df30-37e2-415d-b6d2-e4211b95863d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 720.217735] env[62096]: DEBUG oslo_concurrency.lockutils [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Lock "4057df30-37e2-415d-b6d2-e4211b95863d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 720.241722] env[62096]: DEBUG nova.network.neutron [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 720.340664] env[62096]: DEBUG nova.network.neutron [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 720.341184] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Expecting reply to msg cd7d97a313ab49729fa3b5d2ee0805b6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 720.350278] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cd7d97a313ab49729fa3b5d2ee0805b6 [ 720.372444] env[62096]: DEBUG nova.compute.manager [req-16d1089d-a6a4-457d-af26-d9cc07e40e71 req-4e2558d7-b92d-4ed5-a42f-300ae7b795ba service nova] [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] Received event network-vif-deleted-3d1a8a96-d849-473c-8572-e26b0c89c85a {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 720.510197] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Expecting reply to msg a9378b7cf1854845a48e9258c0590b1d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 720.541551] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a9378b7cf1854845a48e9258c0590b1d [ 720.555318] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac736555-49b4-4d95-8f1b-d8949eb3255d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.563692] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60076aba-c0c8-4cdb-8b9b-246fb15f8844 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.592180] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c75300b-cb42-494c-8f98-95ea3e2149d7 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.598885] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1e2add4-e150-4787-9f75-a1bb53711005 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.611315] env[62096]: DEBUG nova.compute.provider_tree [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 720.611782] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Expecting reply to msg 
3ef3638530d1407c9def40e946280619 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 720.618821] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ef3638530d1407c9def40e946280619 [ 720.650182] env[62096]: DEBUG oslo_concurrency.lockutils [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Releasing lock "refresh_cache-4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 720.650569] env[62096]: DEBUG nova.compute.manager [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 720.650751] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 720.651004] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9f04a445-63c9-4291-bcd6-e92c5d2b8fd7 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.659617] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be4cbc03-fd13-4554-9d29-80a9800c543e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.680255] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe could not be found. [ 720.680489] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 720.680668] env[62096]: INFO nova.compute.manager [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] Took 0.03 seconds to destroy the instance on the hypervisor. [ 720.680897] env[62096]: DEBUG oslo.service.loopingcall [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 720.681112] env[62096]: DEBUG nova.compute.manager [-] [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 720.681206] env[62096]: DEBUG nova.network.neutron [-] [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 720.698563] env[62096]: DEBUG nova.network.neutron [-] [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 720.698998] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 5e1ad1105ba844348b69f327ce2cf983 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 720.705756] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5e1ad1105ba844348b69f327ce2cf983 [ 720.845094] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Releasing lock "refresh_cache-0d89651a-8d34-428a-a9e7-d56ce9d3c5b4" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 720.845444] env[62096]: DEBUG nova.compute.manager [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 720.845516] env[62096]: DEBUG nova.compute.manager [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 720.845681] env[62096]: DEBUG nova.network.neutron [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 720.869341] env[62096]: DEBUG nova.network.neutron [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 720.869903] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Expecting reply to msg 17029c924d864332af80aa0438fe46ec in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 720.876706] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 17029c924d864332af80aa0438fe46ec [ 721.032471] env[62096]: INFO nova.scheduler.client.report [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Deleted allocations for instance 1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9 [ 721.039554] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Expecting reply to msg 3f2ea4550aeb4de2a4ad9a90eadea63a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 721.050503] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3f2ea4550aeb4de2a4ad9a90eadea63a [ 721.114342] env[62096]: DEBUG nova.scheduler.client.report [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 721.116751] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Expecting reply to msg 288448854fe040dc998c0e62c306acaa in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 721.126341] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 288448854fe040dc998c0e62c306acaa [ 721.200925] env[62096]: DEBUG nova.network.neutron [-] [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 721.201254] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 2773189d3172471984a9cb64923fe6e8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 721.210188] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2773189d3172471984a9cb64923fe6e8 [ 721.371526] env[62096]: DEBUG nova.network.neutron [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 721.372125] 
env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Expecting reply to msg e67216394ba8453e9ee5a233ec43a33c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 721.380438] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e67216394ba8453e9ee5a233ec43a33c [ 721.541505] env[62096]: DEBUG oslo_concurrency.lockutils [None req-fdc6c660-999b-45f5-b0c3-7b576c288147 tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Lock "1fed3224-2f97-41f7-aa4a-d07c8ef6d9e9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 135.522s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 721.542107] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Expecting reply to msg 4bdbf48b88194074b20a0e7e076b2489 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 721.551196] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4bdbf48b88194074b20a0e7e076b2489 [ 721.618722] env[62096]: DEBUG oslo_concurrency.lockutils [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.914s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 721.619312] env[62096]: ERROR nova.compute.manager [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port d00a01dc-5cbe-464a-939f-07ed16a2758d, please check neutron logs for more information. 
[ 721.619312] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] Traceback (most recent call last): [ 721.619312] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 721.619312] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] self.driver.spawn(context, instance, image_meta, [ 721.619312] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 721.619312] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 721.619312] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 721.619312] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] vm_ref = self.build_virtual_machine(instance, [ 721.619312] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 721.619312] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] vif_infos = vmwarevif.get_vif_info(self._session, [ 721.619312] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 721.619715] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] for vif in network_info: [ 721.619715] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 721.619715] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] return self._sync_wrapper(fn, *args, **kwargs) [ 721.619715] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 721.619715] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] self.wait() [ 721.619715] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 721.619715] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] self[:] = self._gt.wait() [ 721.619715] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 721.619715] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] return self._exit_event.wait() [ 721.619715] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 721.619715] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] current.throw(*self._exc) [ 721.619715] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
721.619715] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] result = function(*args, **kwargs) [ 721.620160] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 721.620160] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] return func(*args, **kwargs) [ 721.620160] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 721.620160] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] raise e [ 721.620160] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 721.620160] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] nwinfo = self.network_api.allocate_for_instance( [ 721.620160] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 721.620160] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] created_port_ids = self._update_ports_for_instance( [ 721.620160] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 721.620160] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] with excutils.save_and_reraise_exception(): [ 721.620160] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 721.620160] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] self.force_reraise() [ 721.620160] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 721.620595] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] raise self.value [ 721.620595] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 721.620595] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] updated_port = self._update_port( [ 721.620595] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 721.620595] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] _ensure_no_port_binding_failure(port) [ 721.620595] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 721.620595] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] raise exception.PortBindingFailed(port_id=port['id']) [ 721.620595] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] nova.exception.PortBindingFailed: Binding failed for 
port d00a01dc-5cbe-464a-939f-07ed16a2758d, please check neutron logs for more information. [ 721.620595] env[62096]: ERROR nova.compute.manager [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] [ 721.620595] env[62096]: DEBUG nova.compute.utils [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] Binding failed for port d00a01dc-5cbe-464a-939f-07ed16a2758d, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 721.621226] env[62096]: DEBUG oslo_concurrency.lockutils [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.635s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 721.622719] env[62096]: INFO nova.compute.claims [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 721.624438] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Expecting reply to msg 364169e7cfab46abad2758b91563d59c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 721.625640] env[62096]: DEBUG nova.compute.manager [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] Build of instance ac8746f4-95b0-440e-bc3e-a92457ed664f was re-scheduled: Binding failed for port d00a01dc-5cbe-464a-939f-07ed16a2758d, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 721.626057] env[62096]: DEBUG nova.compute.manager [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 721.626275] env[62096]: DEBUG oslo_concurrency.lockutils [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Acquiring lock "refresh_cache-ac8746f4-95b0-440e-bc3e-a92457ed664f" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 721.626421] env[62096]: DEBUG oslo_concurrency.lockutils [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Acquired lock "refresh_cache-ac8746f4-95b0-440e-bc3e-a92457ed664f" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 721.626576] env[62096]: DEBUG nova.network.neutron [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 721.626921] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Expecting reply to msg 6f24512d629549d48fadc7f1fe26a91c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 721.632782] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6f24512d629549d48fadc7f1fe26a91c [ 721.658464] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 364169e7cfab46abad2758b91563d59c [ 721.703479] env[62096]: INFO nova.compute.manager [-] [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] Took 1.02 seconds to deallocate network for instance. [ 721.705862] env[62096]: DEBUG nova.compute.claims [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 721.706047] env[62096]: DEBUG oslo_concurrency.lockutils [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 721.874897] env[62096]: INFO nova.compute.manager [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] [instance: 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4] Took 1.03 seconds to deallocate network for instance. 
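Both PortBindingFailed tracebacks in this run (the one above for instance ac8746f4-95b0-440e-bc3e-a92457ed664f, and another below for port c981a4c6-a985-4393-9bca-e1f2dd45c026) pass through oslo.utils' save_and_reraise_exception context manager: the __exit__ -> force_reraise() -> raise self.value frames. The following is a minimal usage sketch of that pattern only; everything except the context manager itself (the exception class, bind_port, the port id, and the rollback print) is hypothetical illustration, not Nova code.

from oslo_utils import excutils


class PortBindingFailed(Exception):
    """Hypothetical stand-in for nova.exception.PortBindingFailed."""


def bind_port(port_id):
    # Hypothetical stand-in for the Neutron port update that fails to bind.
    raise PortBindingFailed(f"Binding failed for port {port_id}")


def update_ports(port_ids):
    created = []
    for port_id in port_ids:
        try:
            created.append(bind_port(port_id))
        except Exception:
            # Run rollback while the original exception is saved; when the
            # with-block exits, __exit__ calls force_reraise() and re-raises
            # the saved exception unchanged -- the same frames that appear in
            # the traceback above.
            with excutils.save_and_reraise_exception():
                print(f"rolling back already-created ports: {created}")


try:
    update_ports(["d00a01dc-5cbe-464a-939f-07ed16a2758d"])
except PortBindingFailed as exc:
    # The caller still sees the original error and can react to it.
    print(exc)

The log above shows that follow-through in the real service: the compute_resources claim is aborted, the instance network is deallocated, and the build is re-scheduled.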
[ 721.876735] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Expecting reply to msg d051a3ce639743ddab4b52889c51c2b5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 721.911401] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d051a3ce639743ddab4b52889c51c2b5 [ 722.044116] env[62096]: DEBUG nova.compute.manager [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 722.045768] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Expecting reply to msg 69e37a8ca34e4ec59dd3a5b73fe9eb87 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 722.085920] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 69e37a8ca34e4ec59dd3a5b73fe9eb87 [ 722.130097] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Expecting reply to msg 73bb986f74a44c9a9567f39c22ce72a5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 722.138126] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 73bb986f74a44c9a9567f39c22ce72a5 [ 722.147468] env[62096]: DEBUG nova.network.neutron [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 722.260252] env[62096]: DEBUG nova.network.neutron [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.260771] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Expecting reply to msg 726ff7ee4f9a42dcb97680c49b7ef539 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 722.268398] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 726ff7ee4f9a42dcb97680c49b7ef539 [ 722.380790] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Expecting reply to msg 48b08183f19c49db90ac5a1583b0673b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 722.411786] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 48b08183f19c49db90ac5a1583b0673b [ 722.563695] env[62096]: DEBUG oslo_concurrency.lockutils [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 722.762532] env[62096]: DEBUG oslo_concurrency.lockutils [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Releasing lock "refresh_cache-ac8746f4-95b0-440e-bc3e-a92457ed664f" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 722.762775] env[62096]: DEBUG nova.compute.manager [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 722.762960] env[62096]: DEBUG nova.compute.manager [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 722.763162] env[62096]: DEBUG nova.network.neutron [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 722.777675] env[62096]: DEBUG nova.network.neutron [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 722.778254] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Expecting reply to msg e861a8e803a3468989533e1d4fec9e37 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 722.785456] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e861a8e803a3468989533e1d4fec9e37 [ 722.903615] env[62096]: INFO nova.scheduler.client.report [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Deleted allocations for instance 0d89651a-8d34-428a-a9e7-d56ce9d3c5b4 [ 722.911978] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Expecting reply to msg 02f4da8561c747f8aceb4f8a5ad0f06b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 722.925186] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 02f4da8561c747f8aceb4f8a5ad0f06b [ 722.962507] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2af47a4a-1103-4ad1-a27e-9669b0e3336b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.970393] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30300caa-5c9d-4c4e-afbb-0095b932a750 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.000332] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86a7f79c-b4b4-4af8-a194-dd3fe460a3cd {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.007917] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-446c990c-65f0-43b1-a086-cef402474e0b {{(pid=62096) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.021157] env[62096]: DEBUG nova.compute.provider_tree [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 723.021581] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Expecting reply to msg b1cdef123ffb407d9b2b42123ddd05f8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 723.029069] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b1cdef123ffb407d9b2b42123ddd05f8 [ 723.280824] env[62096]: DEBUG nova.network.neutron [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 723.281400] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Expecting reply to msg 93d15d2760974a619c7e4edc574c9288 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 723.290206] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 93d15d2760974a619c7e4edc574c9288 [ 723.414452] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4c22e505-acaf-4907-b777-48f6dcefcfb3 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362 tempest-FloatingIPsAssociationNegativeTestJSON-1636745362-project-member] Lock "0d89651a-8d34-428a-a9e7-d56ce9d3c5b4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 135.633s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 723.415207] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Expecting reply to msg ebd37c78826447f8babc844134f4acd5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 723.434464] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ebd37c78826447f8babc844134f4acd5 [ 723.524760] env[62096]: DEBUG nova.scheduler.client.report [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 723.527180] env[62096]: 
INFO oslo_messaging._drivers.amqpdriver [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Expecting reply to msg 3dad90d6adaf486e82269dd099aa95e4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 723.542067] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3dad90d6adaf486e82269dd099aa95e4 [ 723.784243] env[62096]: INFO nova.compute.manager [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] [instance: ac8746f4-95b0-440e-bc3e-a92457ed664f] Took 1.02 seconds to deallocate network for instance. [ 723.785992] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Expecting reply to msg 5a781a72e1c14f0f8ec121e5dc03e842 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 723.825948] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5a781a72e1c14f0f8ec121e5dc03e842 [ 723.917954] env[62096]: DEBUG nova.compute.manager [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 723.920146] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Expecting reply to msg 16f9a87a0b9742ce92589bbf9e5cddda in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 723.971378] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 16f9a87a0b9742ce92589bbf9e5cddda [ 724.036598] env[62096]: DEBUG oslo_concurrency.lockutils [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.413s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 724.036598] env[62096]: DEBUG nova.compute.manager [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 724.036598] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Expecting reply to msg 6bec4e98a58b42f589751c699d4a3096 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 724.037379] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.099s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 724.038786] env[62096]: INFO nova.compute.claims [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] [instance: d38be540-9cd0-428a-b10d-313d2d464b25] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 724.040269] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Expecting reply to msg 5987c736eecd48669b2ad4fc700631a7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 724.080951] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6bec4e98a58b42f589751c699d4a3096 [ 724.084034] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5987c736eecd48669b2ad4fc700631a7 [ 724.290481] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Expecting reply to msg 02478a1f8f304f93ad348c0eb8fb13cf in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 724.327961] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 02478a1f8f304f93ad348c0eb8fb13cf [ 724.445776] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 724.545182] env[62096]: DEBUG nova.compute.utils [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 724.545830] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Expecting reply to msg 75bc785862e14e38b89ed765118b870a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 724.547815] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Expecting reply to msg 498d5e72e19b4c849d971f13609f1dc4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 
724.549851] env[62096]: DEBUG nova.compute.manager [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 724.549851] env[62096]: DEBUG nova.network.neutron [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 724.560087] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 75bc785862e14e38b89ed765118b870a [ 724.561497] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 498d5e72e19b4c849d971f13609f1dc4 [ 724.588563] env[62096]: DEBUG nova.policy [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '09c3f68c609f4f78909ef211c16b0b51', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'df3adfc720b147feb8cf4cfb26980111', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 724.812127] env[62096]: INFO nova.scheduler.client.report [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Deleted allocations for instance ac8746f4-95b0-440e-bc3e-a92457ed664f [ 724.819070] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Expecting reply to msg a221f2b533ff4471a66093ab9b5993f5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 724.835330] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a221f2b533ff4471a66093ab9b5993f5 [ 725.031041] env[62096]: DEBUG nova.network.neutron [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] Successfully created port: c981a4c6-a985-4393-9bca-e1f2dd45c026 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 725.051000] env[62096]: DEBUG nova.compute.manager [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 725.052868] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Expecting reply to msg b7c87ad993b640da8f55fd9e228d9fdf in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 725.111439] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b7c87ad993b640da8f55fd9e228d9fdf [ 725.320930] env[62096]: DEBUG oslo_concurrency.lockutils [None req-74a880f6-b7ea-4e56-b7f7-9e11c1a0c4ac tempest-ServerRescueNegativeTestJSON-1042829532 tempest-ServerRescueNegativeTestJSON-1042829532-project-member] Lock "ac8746f4-95b0-440e-bc3e-a92457ed664f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 136.722s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 725.321516] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg ec534cd115974fdaad2e43889790d172 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 725.333773] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ec534cd115974fdaad2e43889790d172 [ 725.476894] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28e4483f-2fb8-41f3-9f12-8a384a3fe804 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.486476] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e9d383b-0e50-4006-b5d4-fa48b4e074af {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.524977] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a8ce205-4109-4173-b046-f074bae8ea4c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.534826] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b3769a5-c2e0-4a49-b788-7a063987753b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.553498] env[62096]: DEBUG nova.compute.provider_tree [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 725.554050] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Expecting reply to msg a334571f78074e94b1b2557df2768789 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 725.561020] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a334571f78074e94b1b2557df2768789 [ 725.567423] env[62096]: INFO nova.virt.block_device [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 
tempest-ServerActionsV293TestJSON-192366267-project-member] [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] Booting with volume 1bcec79e-6a1d-47aa-98b9-832fd12e716a at /dev/sda [ 725.608560] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b6415b2f-5f70-4257-9b61-0cf592cfaf2e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.617291] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ead7aa07-2c97-4641-a628-6c67d2162ec0 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.638063] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-da2788c5-9b2c-4fec-a65a-6b5c32659ff3 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.645784] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec84176a-9291-41b7-8edb-6bcd7f8dc31f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.667836] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1e89406-a88a-4470-8442-d46a64e3bf15 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.674001] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed34a44a-d5c6-48ef-947a-0ac3c44bb68c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.687935] env[62096]: DEBUG nova.virt.block_device [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] Updating existing volume attachment record: dfb70024-a9fe-4216-baf1-d90f4be31779 {{(pid=62096) _volume_attach /opt/stack/nova/nova/virt/block_device.py:665}} [ 725.823679] env[62096]: DEBUG nova.compute.manager [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] Starting instance... 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 725.831391] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 56a6cc0974f745daaa442ad51c572bde in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 725.875752] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 56a6cc0974f745daaa442ad51c572bde [ 726.068521] env[62096]: DEBUG nova.scheduler.client.report [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 726.068521] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Expecting reply to msg ed02fc2e80554220b33740c18f3f1a13 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 726.079936] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ed02fc2e80554220b33740c18f3f1a13 [ 726.176550] env[62096]: DEBUG nova.compute.manager [req-7ff3cb17-c2bc-48ec-a274-65e5c606be26 req-b9bb63bd-60fa-4c62-b9ea-c5fbec984c53 service nova] [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] Received event network-changed-c981a4c6-a985-4393-9bca-e1f2dd45c026 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 726.176550] env[62096]: DEBUG nova.compute.manager [req-7ff3cb17-c2bc-48ec-a274-65e5c606be26 req-b9bb63bd-60fa-4c62-b9ea-c5fbec984c53 service nova] [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] Refreshing instance network info cache due to event network-changed-c981a4c6-a985-4393-9bca-e1f2dd45c026. 
{{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 726.176550] env[62096]: DEBUG oslo_concurrency.lockutils [req-7ff3cb17-c2bc-48ec-a274-65e5c606be26 req-b9bb63bd-60fa-4c62-b9ea-c5fbec984c53 service nova] Acquiring lock "refresh_cache-03570c3d-3ca5-495d-8a52-2f86b280f667" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 726.176550] env[62096]: DEBUG oslo_concurrency.lockutils [req-7ff3cb17-c2bc-48ec-a274-65e5c606be26 req-b9bb63bd-60fa-4c62-b9ea-c5fbec984c53 service nova] Acquired lock "refresh_cache-03570c3d-3ca5-495d-8a52-2f86b280f667" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.176550] env[62096]: DEBUG nova.network.neutron [req-7ff3cb17-c2bc-48ec-a274-65e5c606be26 req-b9bb63bd-60fa-4c62-b9ea-c5fbec984c53 service nova] [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] Refreshing network info cache for port c981a4c6-a985-4393-9bca-e1f2dd45c026 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 726.176921] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-7ff3cb17-c2bc-48ec-a274-65e5c606be26 req-b9bb63bd-60fa-4c62-b9ea-c5fbec984c53 service nova] Expecting reply to msg b1bde4d7f34c4cb4bb9307f41dc9ae2c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 726.184789] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b1bde4d7f34c4cb4bb9307f41dc9ae2c [ 726.238191] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Expecting reply to msg 657121974dbb41af8df5c9bae7f43349 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 726.249665] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 657121974dbb41af8df5c9bae7f43349 [ 726.353409] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 726.427362] env[62096]: ERROR nova.compute.manager [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c981a4c6-a985-4393-9bca-e1f2dd45c026, please check neutron logs for more information. 
[ 726.427362] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 726.427362] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 726.427362] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 726.427362] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 726.427362] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 726.427362] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 726.427362] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 726.427362] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 726.427362] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 726.427362] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 726.427362] env[62096]: ERROR nova.compute.manager raise self.value [ 726.427362] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 726.427362] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 726.427362] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 726.427362] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 726.427830] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 726.427830] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 726.427830] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c981a4c6-a985-4393-9bca-e1f2dd45c026, please check neutron logs for more information. 
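The last frames above (nova/network/neutron.py line 294, _ensure_no_port_binding_failure) are where PortBindingFailed is actually raised. Below is a minimal sketch of that guard under simplifying assumptions: the Neutron port is a plain dict, a binding:vif_type of 'binding_failed' is taken as the failure marker, and the exception class is a stand-in; none of this is copied from Nova's source.

class PortBindingFailed(Exception):
    """Simplified stand-in for nova.exception.PortBindingFailed."""

    def __init__(self, port_id):
        super().__init__(f"Binding failed for port {port_id}, "
                         "please check neutron logs for more information.")
        self.port_id = port_id


def ensure_no_port_binding_failure(port):
    # Assumption: Neutron reports a failed binding by setting the port's
    # binding:vif_type to 'binding_failed'; if so, the spawn cannot proceed.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


# Example mirroring the failure above.
port = {'id': 'c981a4c6-a985-4393-9bca-e1f2dd45c026',
        'binding:vif_type': 'binding_failed'}
try:
    ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)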
[ 726.427830] env[62096]: ERROR nova.compute.manager [ 726.427830] env[62096]: Traceback (most recent call last): [ 726.427830] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 726.427830] env[62096]: listener.cb(fileno) [ 726.427830] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 726.427830] env[62096]: result = function(*args, **kwargs) [ 726.427830] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 726.427830] env[62096]: return func(*args, **kwargs) [ 726.427830] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 726.427830] env[62096]: raise e [ 726.427830] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 726.427830] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 726.427830] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 726.427830] env[62096]: created_port_ids = self._update_ports_for_instance( [ 726.427830] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 726.427830] env[62096]: with excutils.save_and_reraise_exception(): [ 726.427830] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 726.427830] env[62096]: self.force_reraise() [ 726.427830] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 726.427830] env[62096]: raise self.value [ 726.427830] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 726.427830] env[62096]: updated_port = self._update_port( [ 726.427830] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 726.427830] env[62096]: _ensure_no_port_binding_failure(port) [ 726.427830] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 726.427830] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 726.428848] env[62096]: nova.exception.PortBindingFailed: Binding failed for port c981a4c6-a985-4393-9bca-e1f2dd45c026, please check neutron logs for more information. [ 726.428848] env[62096]: Removing descriptor: 16 [ 726.563097] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.525s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 726.563097] env[62096]: DEBUG nova.compute.manager [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] [instance: d38be540-9cd0-428a-b10d-313d2d464b25] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 726.564820] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Expecting reply to msg fe301d04378948da91057643ec6e1658 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 726.566588] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.881s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 726.568462] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Expecting reply to msg 1025dcb00d8d4d6fba082e0cde4db19f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 726.603520] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1025dcb00d8d4d6fba082e0cde4db19f [ 726.605942] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fe301d04378948da91057643ec6e1658 [ 726.692197] env[62096]: DEBUG nova.network.neutron [req-7ff3cb17-c2bc-48ec-a274-65e5c606be26 req-b9bb63bd-60fa-4c62-b9ea-c5fbec984c53 service nova] [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 726.779783] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Expecting reply to msg ffd4e13ab39549ca8790c0d025eb0dfe in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 726.788596] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ffd4e13ab39549ca8790c0d025eb0dfe [ 726.841183] env[62096]: DEBUG nova.network.neutron [req-7ff3cb17-c2bc-48ec-a274-65e5c606be26 req-b9bb63bd-60fa-4c62-b9ea-c5fbec984c53 service nova] [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.841183] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-7ff3cb17-c2bc-48ec-a274-65e5c606be26 req-b9bb63bd-60fa-4c62-b9ea-c5fbec984c53 service nova] Expecting reply to msg 1cbacae7b8c042f7b4922580b5b1989e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 726.849630] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1cbacae7b8c042f7b4922580b5b1989e [ 727.073117] env[62096]: DEBUG nova.compute.utils [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 727.073117] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Expecting reply to msg 748908b782f442d886fb9a018445f230 in queue 
reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 727.073117] env[62096]: DEBUG nova.compute.manager [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] [instance: d38be540-9cd0-428a-b10d-313d2d464b25] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 727.073117] env[62096]: DEBUG nova.network.neutron [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] [instance: d38be540-9cd0-428a-b10d-313d2d464b25] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 727.082006] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 748908b782f442d886fb9a018445f230 [ 727.127123] env[62096]: DEBUG nova.policy [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '10e5625daadc4f469f683856e9dbd4e1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c8046bb74b4d432eb9fe7763fc2cf698', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 727.282738] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Expecting reply to msg 74ad9bf1f47543e391a35dff4904710d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 727.321718] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 74ad9bf1f47543e391a35dff4904710d [ 727.350667] env[62096]: DEBUG oslo_concurrency.lockutils [req-7ff3cb17-c2bc-48ec-a274-65e5c606be26 req-b9bb63bd-60fa-4c62-b9ea-c5fbec984c53 service nova] Releasing lock "refresh_cache-03570c3d-3ca5-495d-8a52-2f86b280f667" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 727.467431] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c09449e3-e218-4252-a4fd-21724a287e90 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.472598] env[62096]: DEBUG nova.network.neutron [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] [instance: d38be540-9cd0-428a-b10d-313d2d464b25] Successfully created port: 1c25f8be-60a8-4ef8-b3db-1095b6a81385 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 727.479443] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dff83ae-edf8-4595-9149-c6e0d7742f40 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.515968] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2fd94aa-a787-4bce-8b42-26e9be736081 {{(pid=62096) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.523377] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fc1fb45-4ccc-47e9-b07e-49178762b63a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.536761] env[62096]: DEBUG nova.compute.provider_tree [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 727.537278] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Expecting reply to msg d5c0585030c54b0f81eff4216b0e49b6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 727.554060] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d5c0585030c54b0f81eff4216b0e49b6 [ 727.577436] env[62096]: DEBUG nova.compute.manager [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] [instance: d38be540-9cd0-428a-b10d-313d2d464b25] Start building block device mappings for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 727.579155] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Expecting reply to msg 52e86d46cf374d20be832f621339f6e7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 727.654228] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 52e86d46cf374d20be832f621339f6e7 [ 727.785991] env[62096]: DEBUG nova.compute.manager [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 727.786558] env[62096]: DEBUG nova.virt.hardware [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 727.786776] env[62096]: DEBUG nova.virt.hardware [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 727.786965] env[62096]: DEBUG nova.virt.hardware [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 727.787797] env[62096]: DEBUG nova.virt.hardware [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 727.787797] env[62096]: DEBUG nova.virt.hardware [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 727.787797] env[62096]: DEBUG nova.virt.hardware [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 727.787797] env[62096]: DEBUG nova.virt.hardware [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 727.787797] env[62096]: DEBUG nova.virt.hardware [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 727.788097] env[62096]: DEBUG nova.virt.hardware [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Got 1 possible 
topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 727.788131] env[62096]: DEBUG nova.virt.hardware [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 727.788938] env[62096]: DEBUG nova.virt.hardware [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 727.789178] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c9a90a9-3e77-480a-be34-c4e68dc1b081 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.803972] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cf4eb81-0892-4205-868f-ebceee5013ef {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.825195] env[62096]: ERROR nova.compute.manager [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c981a4c6-a985-4393-9bca-e1f2dd45c026, please check neutron logs for more information. 
[ 727.825195] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] Traceback (most recent call last): [ 727.825195] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 727.825195] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] yield resources [ 727.825195] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 727.825195] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] self.driver.spawn(context, instance, image_meta, [ 727.825195] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 727.825195] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] self._vmops.spawn(context, instance, image_meta, injected_files, [ 727.825195] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 727.825195] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] vm_ref = self.build_virtual_machine(instance, [ 727.825195] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 727.825625] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] vif_infos = vmwarevif.get_vif_info(self._session, [ 727.825625] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 727.825625] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] for vif in network_info: [ 727.825625] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 727.825625] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] return self._sync_wrapper(fn, *args, **kwargs) [ 727.825625] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 727.825625] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] self.wait() [ 727.825625] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 727.825625] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] self[:] = self._gt.wait() [ 727.825625] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 727.825625] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] return self._exit_event.wait() [ 727.825625] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 727.825625] env[62096]: ERROR 
nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] current.throw(*self._exc) [ 727.826062] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 727.826062] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] result = function(*args, **kwargs) [ 727.826062] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 727.826062] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] return func(*args, **kwargs) [ 727.826062] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 727.826062] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] raise e [ 727.826062] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 727.826062] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] nwinfo = self.network_api.allocate_for_instance( [ 727.826062] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 727.826062] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] created_port_ids = self._update_ports_for_instance( [ 727.826062] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 727.826062] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] with excutils.save_and_reraise_exception(): [ 727.826062] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 727.826453] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] self.force_reraise() [ 727.826453] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 727.826453] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] raise self.value [ 727.826453] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 727.826453] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] updated_port = self._update_port( [ 727.826453] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 727.826453] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] _ensure_no_port_binding_failure(port) [ 727.826453] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
727.826453] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] raise exception.PortBindingFailed(port_id=port['id']) [ 727.826453] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] nova.exception.PortBindingFailed: Binding failed for port c981a4c6-a985-4393-9bca-e1f2dd45c026, please check neutron logs for more information. [ 727.826453] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] [ 727.826453] env[62096]: INFO nova.compute.manager [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] Terminating instance [ 727.829513] env[62096]: DEBUG oslo_concurrency.lockutils [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Acquiring lock "refresh_cache-03570c3d-3ca5-495d-8a52-2f86b280f667" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 727.829513] env[62096]: DEBUG oslo_concurrency.lockutils [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Acquired lock "refresh_cache-03570c3d-3ca5-495d-8a52-2f86b280f667" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.829513] env[62096]: DEBUG nova.network.neutron [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 727.830990] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Expecting reply to msg 4bad097cc71846cea4d2b5fcae9ddf10 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 727.838775] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4bad097cc71846cea4d2b5fcae9ddf10 [ 728.040327] env[62096]: DEBUG nova.scheduler.client.report [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 728.042945] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Expecting reply to msg 1daee0a0f6834bc4916de3175e14e7b2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 728.063202] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] 
Received RPC response for msg 1daee0a0f6834bc4916de3175e14e7b2 [ 728.083852] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Expecting reply to msg b3969395072a4012b9df2c82c6e2ebb1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 728.122733] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b3969395072a4012b9df2c82c6e2ebb1 [ 728.365546] env[62096]: DEBUG nova.network.neutron [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 728.536542] env[62096]: DEBUG nova.network.neutron [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 728.537054] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Expecting reply to msg 0f479c8064a949e49dba63da94e64f0c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 728.549076] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0f479c8064a949e49dba63da94e64f0c [ 728.551375] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.985s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 728.552026] env[62096]: ERROR nova.compute.manager [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] [instance: 7d62e302-8080-4699-b88d-cb29031e6707] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 6421eacc-4575-4a31-a454-a75f25e03132, please check neutron logs for more information. 
[ 728.552026] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] Traceback (most recent call last): [ 728.552026] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 728.552026] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] self.driver.spawn(context, instance, image_meta, [ 728.552026] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 728.552026] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] self._vmops.spawn(context, instance, image_meta, injected_files, [ 728.552026] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 728.552026] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] vm_ref = self.build_virtual_machine(instance, [ 728.552026] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 728.552026] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] vif_infos = vmwarevif.get_vif_info(self._session, [ 728.552026] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 728.552458] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] for vif in network_info: [ 728.552458] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 728.552458] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] return self._sync_wrapper(fn, *args, **kwargs) [ 728.552458] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 728.552458] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] self.wait() [ 728.552458] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 728.552458] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] self[:] = self._gt.wait() [ 728.552458] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 728.552458] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] return self._exit_event.wait() [ 728.552458] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 728.552458] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] result = hub.switch() [ 728.552458] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
728.552458] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] return self.greenlet.switch() [ 728.552848] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 728.552848] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] result = function(*args, **kwargs) [ 728.552848] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 728.552848] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] return func(*args, **kwargs) [ 728.552848] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 728.552848] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] raise e [ 728.552848] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 728.552848] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] nwinfo = self.network_api.allocate_for_instance( [ 728.552848] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 728.552848] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] created_port_ids = self._update_ports_for_instance( [ 728.552848] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 728.552848] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] with excutils.save_and_reraise_exception(): [ 728.552848] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 728.553234] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] self.force_reraise() [ 728.553234] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 728.553234] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] raise self.value [ 728.553234] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 728.553234] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] updated_port = self._update_port( [ 728.553234] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 728.553234] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] _ensure_no_port_binding_failure(port) [ 728.553234] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 728.553234] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] raise exception.PortBindingFailed(port_id=port['id']) [ 728.553234] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] nova.exception.PortBindingFailed: Binding failed for port 6421eacc-4575-4a31-a454-a75f25e03132, please check neutron logs for more information. [ 728.553234] env[62096]: ERROR nova.compute.manager [instance: 7d62e302-8080-4699-b88d-cb29031e6707] [ 728.553924] env[62096]: DEBUG nova.compute.utils [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] [instance: 7d62e302-8080-4699-b88d-cb29031e6707] Binding failed for port 6421eacc-4575-4a31-a454-a75f25e03132, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 728.553924] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.151s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 728.555738] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Expecting reply to msg 217ff8c0aade40929f1187bad84cb2bf in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 728.558396] env[62096]: DEBUG nova.compute.manager [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] [instance: 7d62e302-8080-4699-b88d-cb29031e6707] Build of instance 7d62e302-8080-4699-b88d-cb29031e6707 was re-scheduled: Binding failed for port 6421eacc-4575-4a31-a454-a75f25e03132, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 728.559298] env[62096]: DEBUG nova.compute.manager [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] [instance: 7d62e302-8080-4699-b88d-cb29031e6707] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 728.559893] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Acquiring lock "refresh_cache-7d62e302-8080-4699-b88d-cb29031e6707" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 728.560241] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Acquired lock "refresh_cache-7d62e302-8080-4699-b88d-cb29031e6707" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.560429] env[62096]: DEBUG nova.network.neutron [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] [instance: 7d62e302-8080-4699-b88d-cb29031e6707] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 728.560815] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Expecting reply to msg 9aad9cbff36b427e98e025b704b8ff63 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 728.570267] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9aad9cbff36b427e98e025b704b8ff63 [ 728.587205] env[62096]: DEBUG nova.compute.manager [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] [instance: d38be540-9cd0-428a-b10d-313d2d464b25] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 728.591885] env[62096]: DEBUG nova.compute.manager [req-df3d3cf3-e57e-4d77-9550-9c4187e4250b req-b20e4d6c-1a9e-4fb7-ad72-eea09472db48 service nova] [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] Received event network-vif-deleted-c981a4c6-a985-4393-9bca-e1f2dd45c026 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 728.597656] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 217ff8c0aade40929f1187bad84cb2bf [ 728.617365] env[62096]: DEBUG nova.virt.hardware [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 728.617365] env[62096]: DEBUG nova.virt.hardware [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 728.617365] env[62096]: DEBUG nova.virt.hardware [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 728.617365] env[62096]: DEBUG nova.virt.hardware [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 728.617572] env[62096]: DEBUG nova.virt.hardware [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 728.617572] env[62096]: DEBUG nova.virt.hardware [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 728.617687] env[62096]: DEBUG nova.virt.hardware [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 728.618273] env[62096]: DEBUG nova.virt.hardware [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 728.618273] env[62096]: DEBUG nova.virt.hardware [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 728.618273] env[62096]: DEBUG nova.virt.hardware [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 728.618456] env[62096]: DEBUG nova.virt.hardware [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 728.619425] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dbb16a6-ae16-4b13-b006-d63357c82811 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.627938] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5a0ca88-5440-4e8e-9213-5a1252b1f45e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.946654] env[62096]: DEBUG oslo_concurrency.lockutils [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Acquiring lock "012afef5-91e9-4fc1-af98-c17a3188ad45" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 728.946899] env[62096]: DEBUG oslo_concurrency.lockutils [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Lock "012afef5-91e9-4fc1-af98-c17a3188ad45" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 729.039401] env[62096]: DEBUG oslo_concurrency.lockutils [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Releasing lock "refresh_cache-03570c3d-3ca5-495d-8a52-2f86b280f667" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 729.040118] env[62096]: DEBUG nova.compute.manager [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] Start destroying the instance on the 
hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 729.040521] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8883fba5-38cf-47cd-8b72-a1c0e9b08a58 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.053579] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16c53c5e-7187-4643-8b94-146f92b7b6dd {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.082438] env[62096]: WARNING nova.virt.vmwareapi.driver [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 03570c3d-3ca5-495d-8a52-2f86b280f667 could not be found. [ 729.082811] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 729.083213] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ef421d2d-29c7-48aa-a1fe-dc4f7f71d889 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.093841] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a5c7ebf-0a61-42db-9d7c-aa09d9bd834b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.117202] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 03570c3d-3ca5-495d-8a52-2f86b280f667 could not be found. [ 729.117567] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 729.117848] env[62096]: INFO nova.compute.manager [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] Took 0.08 seconds to destroy the instance on the hypervisor. [ 729.118412] env[62096]: DEBUG oslo.service.loopingcall [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 729.122476] env[62096]: DEBUG nova.network.neutron [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] [instance: 7d62e302-8080-4699-b88d-cb29031e6707] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 729.124794] env[62096]: DEBUG nova.compute.manager [-] [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 729.125047] env[62096]: DEBUG nova.network.neutron [-] [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 729.144374] env[62096]: DEBUG nova.network.neutron [-] [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 729.145312] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 8ef2349a16794b0c8e80346f4450ecf1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 729.155916] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8ef2349a16794b0c8e80346f4450ecf1 [ 729.255875] env[62096]: ERROR nova.compute.manager [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 1c25f8be-60a8-4ef8-b3db-1095b6a81385, please check neutron logs for more information. 
[ 729.255875] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 729.255875] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 729.255875] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 729.255875] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 729.255875] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 729.255875] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 729.255875] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 729.255875] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 729.255875] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 729.255875] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 729.255875] env[62096]: ERROR nova.compute.manager raise self.value [ 729.255875] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 729.255875] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 729.255875] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 729.255875] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 729.256503] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 729.256503] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 729.256503] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 1c25f8be-60a8-4ef8-b3db-1095b6a81385, please check neutron logs for more information. 
[ 729.256503] env[62096]: ERROR nova.compute.manager [ 729.256503] env[62096]: Traceback (most recent call last): [ 729.256503] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 729.256503] env[62096]: listener.cb(fileno) [ 729.256503] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 729.256503] env[62096]: result = function(*args, **kwargs) [ 729.256503] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 729.256503] env[62096]: return func(*args, **kwargs) [ 729.256503] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 729.256503] env[62096]: raise e [ 729.256503] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 729.256503] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 729.256503] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 729.256503] env[62096]: created_port_ids = self._update_ports_for_instance( [ 729.256503] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 729.256503] env[62096]: with excutils.save_and_reraise_exception(): [ 729.256503] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 729.256503] env[62096]: self.force_reraise() [ 729.256503] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 729.256503] env[62096]: raise self.value [ 729.256503] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 729.256503] env[62096]: updated_port = self._update_port( [ 729.256503] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 729.256503] env[62096]: _ensure_no_port_binding_failure(port) [ 729.256503] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 729.256503] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 729.257568] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 1c25f8be-60a8-4ef8-b3db-1095b6a81385, please check neutron logs for more information. [ 729.257568] env[62096]: Removing descriptor: 16 [ 729.257568] env[62096]: ERROR nova.compute.manager [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] [instance: d38be540-9cd0-428a-b10d-313d2d464b25] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 1c25f8be-60a8-4ef8-b3db-1095b6a81385, please check neutron logs for more information. 
[ 729.257568] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] Traceback (most recent call last): [ 729.257568] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 729.257568] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] yield resources [ 729.257568] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 729.257568] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] self.driver.spawn(context, instance, image_meta, [ 729.257568] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 729.257568] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] self._vmops.spawn(context, instance, image_meta, injected_files, [ 729.257568] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 729.257568] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] vm_ref = self.build_virtual_machine(instance, [ 729.257979] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 729.257979] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] vif_infos = vmwarevif.get_vif_info(self._session, [ 729.257979] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 729.257979] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] for vif in network_info: [ 729.257979] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 729.257979] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] return self._sync_wrapper(fn, *args, **kwargs) [ 729.257979] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 729.257979] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] self.wait() [ 729.257979] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 729.257979] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] self[:] = self._gt.wait() [ 729.257979] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 729.257979] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] return self._exit_event.wait() [ 729.257979] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 729.258419] env[62096]: ERROR 
nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] result = hub.switch() [ 729.258419] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 729.258419] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] return self.greenlet.switch() [ 729.258419] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 729.258419] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] result = function(*args, **kwargs) [ 729.258419] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 729.258419] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] return func(*args, **kwargs) [ 729.258419] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 729.258419] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] raise e [ 729.258419] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 729.258419] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] nwinfo = self.network_api.allocate_for_instance( [ 729.258419] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 729.258419] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] created_port_ids = self._update_ports_for_instance( [ 729.258850] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 729.258850] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] with excutils.save_and_reraise_exception(): [ 729.258850] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 729.258850] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] self.force_reraise() [ 729.258850] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 729.258850] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] raise self.value [ 729.258850] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 729.258850] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] updated_port = self._update_port( [ 729.258850] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 729.258850] 
env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] _ensure_no_port_binding_failure(port) [ 729.258850] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 729.258850] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] raise exception.PortBindingFailed(port_id=port['id']) [ 729.259341] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] nova.exception.PortBindingFailed: Binding failed for port 1c25f8be-60a8-4ef8-b3db-1095b6a81385, please check neutron logs for more information. [ 729.259341] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] [ 729.259341] env[62096]: INFO nova.compute.manager [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] [instance: d38be540-9cd0-428a-b10d-313d2d464b25] Terminating instance [ 729.259341] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Acquiring lock "refresh_cache-d38be540-9cd0-428a-b10d-313d2d464b25" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 729.259527] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Acquired lock "refresh_cache-d38be540-9cd0-428a-b10d-313d2d464b25" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 729.259607] env[62096]: DEBUG nova.network.neutron [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] [instance: d38be540-9cd0-428a-b10d-313d2d464b25] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 729.260033] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Expecting reply to msg 867df944d1b04107ae8cb46684a1270c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 729.268500] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 867df944d1b04107ae8cb46684a1270c [ 729.299336] env[62096]: DEBUG nova.network.neutron [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] [instance: 7d62e302-8080-4699-b88d-cb29031e6707] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 729.299836] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Expecting reply to msg 1bc2ca56420e4fb29a9d7245f3998b21 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 729.307675] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1bc2ca56420e4fb29a9d7245f3998b21 [ 729.512462] env[62096]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36bf570b-a31a-4f21-b4aa-39530a12dd57 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.521524] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3316ae44-9496-418d-8e90-8a0130034318 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.551164] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92ab21d4-267c-47cc-b70f-0f78dc35f731 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.558905] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4fd4eb4-b135-4ad1-adec-5a52a1c734b0 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.574226] env[62096]: DEBUG nova.compute.provider_tree [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 729.574791] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Expecting reply to msg dc1e2cea2e2e4c668e387d471bbcc152 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 729.582872] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dc1e2cea2e2e4c668e387d471bbcc152 [ 729.648915] env[62096]: DEBUG nova.network.neutron [-] [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 729.649482] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg f12b57320f2d446c9595b6c5d203667b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 729.658360] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f12b57320f2d446c9595b6c5d203667b [ 729.782180] env[62096]: DEBUG nova.network.neutron [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] [instance: d38be540-9cd0-428a-b10d-313d2d464b25] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 729.802354] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Releasing lock "refresh_cache-7d62e302-8080-4699-b88d-cb29031e6707" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 729.802581] env[62096]: DEBUG nova.compute.manager [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 729.803010] env[62096]: DEBUG nova.compute.manager [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] [instance: 7d62e302-8080-4699-b88d-cb29031e6707] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 729.803010] env[62096]: DEBUG nova.network.neutron [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] [instance: 7d62e302-8080-4699-b88d-cb29031e6707] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 729.818592] env[62096]: DEBUG nova.network.neutron [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] [instance: 7d62e302-8080-4699-b88d-cb29031e6707] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 729.819148] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Expecting reply to msg e537a2a94e2d48d0a693b661dc7da81f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 729.825757] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e537a2a94e2d48d0a693b661dc7da81f [ 729.892515] env[62096]: DEBUG nova.network.neutron [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] [instance: d38be540-9cd0-428a-b10d-313d2d464b25] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 729.893049] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Expecting reply to msg 61124b0fb4d44f3eb8cd4ff489f9bcc7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 729.900436] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 61124b0fb4d44f3eb8cd4ff489f9bcc7 [ 730.078507] env[62096]: DEBUG nova.scheduler.client.report [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 730.080894] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Expecting reply to msg 
20607f8b8f8b4ac897775f5261a7b547 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 730.095798] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 20607f8b8f8b4ac897775f5261a7b547 [ 730.136872] env[62096]: DEBUG oslo_concurrency.lockutils [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Acquiring lock "250ef7e7-266b-451d-8627-9cce211d4e83" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.137159] env[62096]: DEBUG oslo_concurrency.lockutils [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Lock "250ef7e7-266b-451d-8627-9cce211d4e83" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 730.151443] env[62096]: INFO nova.compute.manager [-] [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] Took 1.03 seconds to deallocate network for instance. [ 730.321943] env[62096]: DEBUG nova.network.neutron [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] [instance: 7d62e302-8080-4699-b88d-cb29031e6707] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.322494] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Expecting reply to msg 74bdba57667d429aaf5bde9c3685d69d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 730.330934] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 74bdba57667d429aaf5bde9c3685d69d [ 730.395430] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Releasing lock "refresh_cache-d38be540-9cd0-428a-b10d-313d2d464b25" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 730.395851] env[62096]: DEBUG nova.compute.manager [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] [instance: d38be540-9cd0-428a-b10d-313d2d464b25] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 730.396055] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] [instance: d38be540-9cd0-428a-b10d-313d2d464b25] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 730.396700] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0ccd47be-50c7-416d-b659-7cc2d8cf9c29 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.406544] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74e34c38-81ae-4768-b98f-c23bd37b9943 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.429031] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] [instance: d38be540-9cd0-428a-b10d-313d2d464b25] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d38be540-9cd0-428a-b10d-313d2d464b25 could not be found. [ 730.429414] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] [instance: d38be540-9cd0-428a-b10d-313d2d464b25] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 730.429707] env[62096]: INFO nova.compute.manager [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] [instance: d38be540-9cd0-428a-b10d-313d2d464b25] Took 0.03 seconds to destroy the instance on the hypervisor. [ 730.430074] env[62096]: DEBUG oslo.service.loopingcall [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 730.430418] env[62096]: DEBUG nova.compute.manager [-] [instance: d38be540-9cd0-428a-b10d-313d2d464b25] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 730.430622] env[62096]: DEBUG nova.network.neutron [-] [instance: d38be540-9cd0-428a-b10d-313d2d464b25] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 730.443940] env[62096]: DEBUG nova.network.neutron [-] [instance: d38be540-9cd0-428a-b10d-313d2d464b25] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 730.444538] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 91703dd1da644f6a889a442ed426ed15 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 730.451630] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 91703dd1da644f6a889a442ed426ed15 [ 730.583402] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.029s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 730.584798] env[62096]: ERROR nova.compute.manager [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 04372f13-66f6-4584-8b4a-3d7edb00fb9a, please check neutron logs for more information. [ 730.584798] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] Traceback (most recent call last): [ 730.584798] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 730.584798] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] self.driver.spawn(context, instance, image_meta, [ 730.584798] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 730.584798] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] self._vmops.spawn(context, instance, image_meta, injected_files, [ 730.584798] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 730.584798] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] vm_ref = self.build_virtual_machine(instance, [ 730.584798] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 730.584798] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] vif_infos = vmwarevif.get_vif_info(self._session, [ 730.584798] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 730.585193] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] for vif in network_info: [ 730.585193] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 730.585193] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] return self._sync_wrapper(fn, *args, **kwargs) [ 730.585193] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 730.585193] 
env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] self.wait() [ 730.585193] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 730.585193] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] self[:] = self._gt.wait() [ 730.585193] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 730.585193] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] return self._exit_event.wait() [ 730.585193] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 730.585193] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] result = hub.switch() [ 730.585193] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 730.585193] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] return self.greenlet.switch() [ 730.585601] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 730.585601] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] result = function(*args, **kwargs) [ 730.585601] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 730.585601] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] return func(*args, **kwargs) [ 730.585601] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 730.585601] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] raise e [ 730.585601] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 730.585601] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] nwinfo = self.network_api.allocate_for_instance( [ 730.585601] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 730.585601] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] created_port_ids = self._update_ports_for_instance( [ 730.585601] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 730.585601] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] with excutils.save_and_reraise_exception(): [ 730.585601] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 730.585956] 
env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] self.force_reraise() [ 730.585956] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 730.585956] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] raise self.value [ 730.585956] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 730.585956] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] updated_port = self._update_port( [ 730.585956] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 730.585956] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] _ensure_no_port_binding_failure(port) [ 730.585956] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 730.585956] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] raise exception.PortBindingFailed(port_id=port['id']) [ 730.585956] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] nova.exception.PortBindingFailed: Binding failed for port 04372f13-66f6-4584-8b4a-3d7edb00fb9a, please check neutron logs for more information. [ 730.585956] env[62096]: ERROR nova.compute.manager [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] [ 730.586316] env[62096]: DEBUG nova.compute.utils [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] Binding failed for port 04372f13-66f6-4584-8b4a-3d7edb00fb9a, please check neutron logs for more information. 
{{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 730.587916] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.397s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 730.588694] env[62096]: INFO nova.compute.claims [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 730.590351] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Expecting reply to msg 754616b3108146eba00b0b64af01407c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 730.591517] env[62096]: DEBUG nova.compute.manager [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] Build of instance 3480c79b-58e4-4759-acd4-b2f45f22da54 was re-scheduled: Binding failed for port 04372f13-66f6-4584-8b4a-3d7edb00fb9a, please check neutron logs for more information. {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 730.592102] env[62096]: DEBUG nova.compute.manager [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 730.592334] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Acquiring lock "refresh_cache-3480c79b-58e4-4759-acd4-b2f45f22da54" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 730.592479] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Acquired lock "refresh_cache-3480c79b-58e4-4759-acd4-b2f45f22da54" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 730.592650] env[62096]: DEBUG nova.network.neutron [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 730.592985] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Expecting reply to msg a5f0b1686fb5404a817a590d9859431f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 730.600417] env[62096]: 
INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a5f0b1686fb5404a817a590d9859431f [ 730.622253] env[62096]: DEBUG nova.compute.manager [req-aee43c29-ba36-49c9-b2a0-21cd1c1b734f req-942f2dbc-73af-4bc3-9973-f4d0a1cae040 service nova] [instance: d38be540-9cd0-428a-b10d-313d2d464b25] Received event network-changed-1c25f8be-60a8-4ef8-b3db-1095b6a81385 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 730.622253] env[62096]: DEBUG nova.compute.manager [req-aee43c29-ba36-49c9-b2a0-21cd1c1b734f req-942f2dbc-73af-4bc3-9973-f4d0a1cae040 service nova] [instance: d38be540-9cd0-428a-b10d-313d2d464b25] Refreshing instance network info cache due to event network-changed-1c25f8be-60a8-4ef8-b3db-1095b6a81385. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 730.622253] env[62096]: DEBUG oslo_concurrency.lockutils [req-aee43c29-ba36-49c9-b2a0-21cd1c1b734f req-942f2dbc-73af-4bc3-9973-f4d0a1cae040 service nova] Acquiring lock "refresh_cache-d38be540-9cd0-428a-b10d-313d2d464b25" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 730.622253] env[62096]: DEBUG oslo_concurrency.lockutils [req-aee43c29-ba36-49c9-b2a0-21cd1c1b734f req-942f2dbc-73af-4bc3-9973-f4d0a1cae040 service nova] Acquired lock "refresh_cache-d38be540-9cd0-428a-b10d-313d2d464b25" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 730.622253] env[62096]: DEBUG nova.network.neutron [req-aee43c29-ba36-49c9-b2a0-21cd1c1b734f req-942f2dbc-73af-4bc3-9973-f4d0a1cae040 service nova] [instance: d38be540-9cd0-428a-b10d-313d2d464b25] Refreshing network info cache for port 1c25f8be-60a8-4ef8-b3db-1095b6a81385 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 730.622451] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-aee43c29-ba36-49c9-b2a0-21cd1c1b734f req-942f2dbc-73af-4bc3-9973-f4d0a1cae040 service nova] Expecting reply to msg 82971ca0b64b4595ab1a9499f7f6b0bf in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 730.629066] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 754616b3108146eba00b0b64af01407c [ 730.630269] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 82971ca0b64b4595ab1a9499f7f6b0bf [ 730.698630] env[62096]: INFO nova.compute.manager [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] Took 0.55 seconds to detach 1 volumes for instance. 
[ 730.701689] env[62096]: DEBUG nova.compute.claims [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 730.701868] env[62096]: DEBUG oslo_concurrency.lockutils [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.824930] env[62096]: INFO nova.compute.manager [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] [instance: 7d62e302-8080-4699-b88d-cb29031e6707] Took 1.02 seconds to deallocate network for instance. [ 730.826726] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Expecting reply to msg 35db68a303774e8eb47120722fb1dd04 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 730.864564] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 35db68a303774e8eb47120722fb1dd04 [ 730.946396] env[62096]: DEBUG nova.network.neutron [-] [instance: d38be540-9cd0-428a-b10d-313d2d464b25] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.946899] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 31727590d41442a4b6e373120211a70b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 730.955895] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 31727590d41442a4b6e373120211a70b [ 731.095653] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Expecting reply to msg 7622297eb1f449a5b970c54d7ca4aaf4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 731.103570] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7622297eb1f449a5b970c54d7ca4aaf4 [ 731.112698] env[62096]: DEBUG nova.network.neutron [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 731.146403] env[62096]: DEBUG nova.network.neutron [req-aee43c29-ba36-49c9-b2a0-21cd1c1b734f req-942f2dbc-73af-4bc3-9973-f4d0a1cae040 service nova] [instance: d38be540-9cd0-428a-b10d-313d2d464b25] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 731.297989] env[62096]: DEBUG nova.network.neutron [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 731.298722] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Expecting reply to msg acaa4eb04572482e8482a0866d3a489e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 731.301771] env[62096]: DEBUG nova.network.neutron [req-aee43c29-ba36-49c9-b2a0-21cd1c1b734f req-942f2dbc-73af-4bc3-9973-f4d0a1cae040 service nova] [instance: d38be540-9cd0-428a-b10d-313d2d464b25] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 731.302240] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-aee43c29-ba36-49c9-b2a0-21cd1c1b734f req-942f2dbc-73af-4bc3-9973-f4d0a1cae040 service nova] Expecting reply to msg e3ab68804e8145eebeb55fbf249a7e07 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 731.309246] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg acaa4eb04572482e8482a0866d3a489e [ 731.311255] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e3ab68804e8145eebeb55fbf249a7e07 [ 731.331133] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Expecting reply to msg 42a031b812a8489ab15181e403c35f3d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 731.365439] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 42a031b812a8489ab15181e403c35f3d [ 731.448820] env[62096]: INFO nova.compute.manager [-] [instance: d38be540-9cd0-428a-b10d-313d2d464b25] Took 1.02 seconds to deallocate network for instance. 
[ 731.451075] env[62096]: DEBUG nova.compute.claims [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] [instance: d38be540-9cd0-428a-b10d-313d2d464b25] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 731.451255] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 731.800631] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Releasing lock "refresh_cache-3480c79b-58e4-4759-acd4-b2f45f22da54" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 731.800842] env[62096]: DEBUG nova.compute.manager [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 731.802146] env[62096]: DEBUG nova.compute.manager [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 731.802378] env[62096]: DEBUG nova.network.neutron [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 731.804311] env[62096]: DEBUG oslo_concurrency.lockutils [req-aee43c29-ba36-49c9-b2a0-21cd1c1b734f req-942f2dbc-73af-4bc3-9973-f4d0a1cae040 service nova] Releasing lock "refresh_cache-d38be540-9cd0-428a-b10d-313d2d464b25" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 731.804525] env[62096]: DEBUG nova.compute.manager [req-aee43c29-ba36-49c9-b2a0-21cd1c1b734f req-942f2dbc-73af-4bc3-9973-f4d0a1cae040 service nova] [instance: d38be540-9cd0-428a-b10d-313d2d464b25] Received event network-vif-deleted-1c25f8be-60a8-4ef8-b3db-1095b6a81385 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 731.836890] env[62096]: DEBUG nova.network.neutron [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 731.837432] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Expecting reply to msg c137be9f3da043f6921890a47e811c4c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 731.844668] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c137be9f3da043f6921890a47e811c4c [ 731.862244] env[62096]: INFO nova.scheduler.client.report [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Deleted allocations for instance 7d62e302-8080-4699-b88d-cb29031e6707 [ 731.872500] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Expecting reply to msg 66051778c8e84c54a99c432ae90706af in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 731.883744] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 66051778c8e84c54a99c432ae90706af [ 732.001059] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47382ba7-1324-4f4d-898d-f2cee75bdff6 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.008747] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d3e247b-7d85-4512-98d7-73494842a8df {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.044109] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-023d5d0e-a098-411b-b255-641700f97bb3 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.050097] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe6788ad-cc20-4022-9193-0eb7e05a3770 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.067863] env[62096]: DEBUG nova.compute.provider_tree [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 732.067975] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Expecting reply to msg 39acaf4635414fd0b0ad7fbe3a7d6f8e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 732.077402] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 39acaf4635414fd0b0ad7fbe3a7d6f8e [ 732.156505] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg d09b200f20a4495eb04720ba0c5fd864 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 732.169440] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
d09b200f20a4495eb04720ba0c5fd864 [ 732.339622] env[62096]: DEBUG nova.network.neutron [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 732.340639] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Expecting reply to msg 7e2b570aae1e4985aed5ce8fecdc76fc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 732.350916] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7e2b570aae1e4985aed5ce8fecdc76fc [ 732.376135] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f283c87f-a0d6-46b3-ba4c-0ee08c3c4930 tempest-ImagesOneServerNegativeTestJSON-1092536401 tempest-ImagesOneServerNegativeTestJSON-1092536401-project-member] Lock "7d62e302-8080-4699-b88d-cb29031e6707" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 143.640s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 732.376946] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Expecting reply to msg e6f09c4767274a87a1a8a77b52d9e68f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 732.386879] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e6f09c4767274a87a1a8a77b52d9e68f [ 732.571821] env[62096]: DEBUG nova.scheduler.client.report [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 732.574775] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Expecting reply to msg fb475d5288214df494e4eabccadce93a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 732.586587] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fb475d5288214df494e4eabccadce93a [ 732.846863] env[62096]: INFO nova.compute.manager [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] [instance: 3480c79b-58e4-4759-acd4-b2f45f22da54] Took 1.04 seconds to deallocate network for instance. 
[ 732.846863] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Expecting reply to msg 006e99b6049046afa7900c2909f502c5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 732.882468] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 006e99b6049046afa7900c2909f502c5 [ 732.882468] env[62096]: DEBUG nova.compute.manager [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 732.882468] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Expecting reply to msg d5b9909c8b1946758e0236028ab9fab3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 732.912059] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d5b9909c8b1946758e0236028ab9fab3 [ 733.083479] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.490s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 733.083479] env[62096]: DEBUG nova.compute.manager [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 733.083479] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Expecting reply to msg 835ea21139794704a39a9b2af80eaf0d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 733.083479] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.128s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 733.086752] env[62096]: INFO nova.compute.claims [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 321ab95b-6221-4bab-b442-a90926098dae] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 733.086752] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Expecting reply to msg 2412b2a97fbf47af8f09596f8e57a43c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 733.123522] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2412b2a97fbf47af8f09596f8e57a43c [ 733.123522] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 835ea21139794704a39a9b2af80eaf0d [ 733.352036] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Expecting reply to msg 9113d7026b27415698ee1e39fa2890f0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 733.398073] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9113d7026b27415698ee1e39fa2890f0 [ 733.407108] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 733.589512] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Expecting reply to msg de3bc41ea43c4c8498c0fe449265034b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 733.591133] env[62096]: DEBUG nova.compute.utils [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 733.591697] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Expecting reply to msg f2af56cfad7d44189d590f1337107d17 in queue 
reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 733.592846] env[62096]: DEBUG nova.compute.manager [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 733.593015] env[62096]: DEBUG nova.network.neutron [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 733.602200] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg de3bc41ea43c4c8498c0fe449265034b [ 733.604512] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f2af56cfad7d44189d590f1337107d17 [ 733.658107] env[62096]: DEBUG nova.policy [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9a42814ec3394d40a3db158e79e2a4f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e5300a04f2c344b18ada4864c60bde9f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 733.879330] env[62096]: INFO nova.scheduler.client.report [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Deleted allocations for instance 3480c79b-58e4-4759-acd4-b2f45f22da54 [ 733.889961] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Expecting reply to msg 96e578b429a64af78b1eedee2687c03f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 733.904996] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 96e578b429a64af78b1eedee2687c03f [ 734.099141] env[62096]: DEBUG nova.compute.manager [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 734.100758] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Expecting reply to msg 6bf7304ffc574298b30b0ca2543e4a87 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 734.135933] env[62096]: DEBUG nova.network.neutron [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] Successfully created port: e1e9548f-8e05-4826-85a0-6602fd03e638 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 734.141060] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6bf7304ffc574298b30b0ca2543e4a87 [ 734.393449] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1aa056a4-9f55-42a7-a362-b5b727b382b0 tempest-ServersTestManualDisk-1610766175 tempest-ServersTestManualDisk-1610766175-project-member] Lock "3480c79b-58e4-4759-acd4-b2f45f22da54" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 144.461s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 734.394045] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Expecting reply to msg d81d7e264a5549a28802ec43a729d0fb in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 734.408324] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d81d7e264a5549a28802ec43a729d0fb [ 734.470732] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0551cdcf-b474-4ea0-b6d6-f4d9723f5a33 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.479018] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47030253-9aed-4ba4-ac70-d3afa19407a6 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.510392] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd16c478-f837-41d4-bd6b-048310512851 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.517973] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94b4c0be-1966-411b-b6b5-2677f371ae0e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.532053] env[62096]: DEBUG nova.compute.provider_tree [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 734.532586] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 
tempest-SecurityGroupsTestJSON-1657304550-project-member] Expecting reply to msg 1bb8c94f1ab94063a2318ff3e0132d19 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 734.539738] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1bb8c94f1ab94063a2318ff3e0132d19 [ 734.605589] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Expecting reply to msg 188d498e07554656b01a320996009baa in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 734.641245] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 188d498e07554656b01a320996009baa [ 734.899900] env[62096]: DEBUG nova.compute.manager [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 734.899900] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Expecting reply to msg 6998bcb09cbc454ca04a79f15fc65a22 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 734.945491] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6998bcb09cbc454ca04a79f15fc65a22 [ 735.035729] env[62096]: DEBUG nova.scheduler.client.report [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 735.038572] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Expecting reply to msg bce2b7b1f6c3453fb857ab494c223a4e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 735.050540] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bce2b7b1f6c3453fb857ab494c223a4e [ 735.108869] env[62096]: DEBUG nova.compute.manager [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 735.139890] env[62096]: DEBUG nova.virt.hardware [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 735.140189] env[62096]: DEBUG nova.virt.hardware [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 735.140363] env[62096]: DEBUG nova.virt.hardware [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 735.140545] env[62096]: DEBUG nova.virt.hardware [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 735.140689] env[62096]: DEBUG nova.virt.hardware [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 735.140833] env[62096]: DEBUG nova.virt.hardware [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 735.141037] env[62096]: DEBUG nova.virt.hardware [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 735.141195] env[62096]: DEBUG nova.virt.hardware [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 735.141451] env[62096]: DEBUG nova.virt.hardware [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 735.141645] env[62096]: DEBUG nova.virt.hardware [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 735.141948] env[62096]: DEBUG nova.virt.hardware [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 735.142682] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-144b613a-5e75-47cd-89ea-f21e81372f2a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.150891] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24242dc5-39f3-49b5-839f-d5e9863966d7 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.209381] env[62096]: DEBUG nova.compute.manager [req-1aa0063a-5c7a-4563-ac8f-6a5ce0daec0f req-4c8997ef-6d6a-4fd7-b403-4e2b1ca08ea7 service nova] [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] Received event network-changed-e1e9548f-8e05-4826-85a0-6602fd03e638 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 735.209572] env[62096]: DEBUG nova.compute.manager [req-1aa0063a-5c7a-4563-ac8f-6a5ce0daec0f req-4c8997ef-6d6a-4fd7-b403-4e2b1ca08ea7 service nova] [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] Refreshing instance network info cache due to event network-changed-e1e9548f-8e05-4826-85a0-6602fd03e638. 
{{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 735.209790] env[62096]: DEBUG oslo_concurrency.lockutils [req-1aa0063a-5c7a-4563-ac8f-6a5ce0daec0f req-4c8997ef-6d6a-4fd7-b403-4e2b1ca08ea7 service nova] Acquiring lock "refresh_cache-58df043b-ab2f-4e78-8bba-084fe53d3d8e" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 735.209924] env[62096]: DEBUG oslo_concurrency.lockutils [req-1aa0063a-5c7a-4563-ac8f-6a5ce0daec0f req-4c8997ef-6d6a-4fd7-b403-4e2b1ca08ea7 service nova] Acquired lock "refresh_cache-58df043b-ab2f-4e78-8bba-084fe53d3d8e" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 735.210460] env[62096]: DEBUG nova.network.neutron [req-1aa0063a-5c7a-4563-ac8f-6a5ce0daec0f req-4c8997ef-6d6a-4fd7-b403-4e2b1ca08ea7 service nova] [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] Refreshing network info cache for port e1e9548f-8e05-4826-85a0-6602fd03e638 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 735.210963] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-1aa0063a-5c7a-4563-ac8f-6a5ce0daec0f req-4c8997ef-6d6a-4fd7-b403-4e2b1ca08ea7 service nova] Expecting reply to msg 5d75296b0c39478d8fd7092bc2e08985 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 735.218111] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5d75296b0c39478d8fd7092bc2e08985 [ 735.417958] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 735.541387] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.461s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 735.541929] env[62096]: DEBUG nova.compute.manager [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 321ab95b-6221-4bab-b442-a90926098dae] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 735.543605] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Expecting reply to msg b95ea617f880406fb28087987c258002 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 735.544727] env[62096]: DEBUG oslo_concurrency.lockutils [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.839s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 735.546547] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Expecting reply to msg 6a8a652e4c64443c9ea2eaec2abe4190 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 735.560611] env[62096]: ERROR nova.compute.manager [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e1e9548f-8e05-4826-85a0-6602fd03e638, please check neutron logs for more information. [ 735.560611] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 735.560611] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 735.560611] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 735.560611] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 735.560611] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 735.560611] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 735.560611] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 735.560611] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 735.560611] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 735.560611] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 735.560611] env[62096]: ERROR nova.compute.manager raise self.value [ 735.560611] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 735.560611] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 735.560611] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 735.560611] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 735.561094] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 735.561094] env[62096]: ERROR nova.compute.manager raise 
exception.PortBindingFailed(port_id=port['id']) [ 735.561094] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e1e9548f-8e05-4826-85a0-6602fd03e638, please check neutron logs for more information. [ 735.561094] env[62096]: ERROR nova.compute.manager [ 735.561094] env[62096]: Traceback (most recent call last): [ 735.561094] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 735.561094] env[62096]: listener.cb(fileno) [ 735.561094] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 735.561094] env[62096]: result = function(*args, **kwargs) [ 735.561094] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 735.561094] env[62096]: return func(*args, **kwargs) [ 735.561094] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 735.561094] env[62096]: raise e [ 735.561094] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 735.561094] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 735.561094] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 735.561094] env[62096]: created_port_ids = self._update_ports_for_instance( [ 735.561094] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 735.561094] env[62096]: with excutils.save_and_reraise_exception(): [ 735.561094] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 735.561094] env[62096]: self.force_reraise() [ 735.561094] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 735.561094] env[62096]: raise self.value [ 735.561094] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 735.561094] env[62096]: updated_port = self._update_port( [ 735.561094] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 735.561094] env[62096]: _ensure_no_port_binding_failure(port) [ 735.561094] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 735.561094] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 735.562069] env[62096]: nova.exception.PortBindingFailed: Binding failed for port e1e9548f-8e05-4826-85a0-6602fd03e638, please check neutron logs for more information. [ 735.562069] env[62096]: Removing descriptor: 16 [ 735.562069] env[62096]: ERROR nova.compute.manager [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e1e9548f-8e05-4826-85a0-6602fd03e638, please check neutron logs for more information. 
[ 735.562069] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] Traceback (most recent call last): [ 735.562069] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 735.562069] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] yield resources [ 735.562069] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 735.562069] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] self.driver.spawn(context, instance, image_meta, [ 735.562069] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 735.562069] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 735.562069] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 735.562069] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] vm_ref = self.build_virtual_machine(instance, [ 735.562452] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 735.562452] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] vif_infos = vmwarevif.get_vif_info(self._session, [ 735.562452] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 735.562452] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] for vif in network_info: [ 735.562452] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 735.562452] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] return self._sync_wrapper(fn, *args, **kwargs) [ 735.562452] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 735.562452] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] self.wait() [ 735.562452] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 735.562452] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] self[:] = self._gt.wait() [ 735.562452] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 735.562452] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] return self._exit_event.wait() [ 735.562452] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 735.562850] env[62096]: ERROR 
nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] result = hub.switch() [ 735.562850] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 735.562850] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] return self.greenlet.switch() [ 735.562850] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 735.562850] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] result = function(*args, **kwargs) [ 735.562850] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 735.562850] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] return func(*args, **kwargs) [ 735.562850] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 735.562850] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] raise e [ 735.562850] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 735.562850] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] nwinfo = self.network_api.allocate_for_instance( [ 735.562850] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 735.562850] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] created_port_ids = self._update_ports_for_instance( [ 735.563330] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 735.563330] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] with excutils.save_and_reraise_exception(): [ 735.563330] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 735.563330] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] self.force_reraise() [ 735.563330] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 735.563330] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] raise self.value [ 735.563330] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 735.563330] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] updated_port = self._update_port( [ 735.563330] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 735.563330] 
env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] _ensure_no_port_binding_failure(port) [ 735.563330] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 735.563330] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] raise exception.PortBindingFailed(port_id=port['id']) [ 735.563755] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] nova.exception.PortBindingFailed: Binding failed for port e1e9548f-8e05-4826-85a0-6602fd03e638, please check neutron logs for more information. [ 735.563755] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] [ 735.563755] env[62096]: INFO nova.compute.manager [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] Terminating instance [ 735.563755] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Acquiring lock "refresh_cache-58df043b-ab2f-4e78-8bba-084fe53d3d8e" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 735.588425] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6a8a652e4c64443c9ea2eaec2abe4190 [ 735.590795] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b95ea617f880406fb28087987c258002 [ 735.735815] env[62096]: DEBUG nova.network.neutron [req-1aa0063a-5c7a-4563-ac8f-6a5ce0daec0f req-4c8997ef-6d6a-4fd7-b403-4e2b1ca08ea7 service nova] [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 736.052886] env[62096]: DEBUG nova.network.neutron [req-1aa0063a-5c7a-4563-ac8f-6a5ce0daec0f req-4c8997ef-6d6a-4fd7-b403-4e2b1ca08ea7 service nova] [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 736.053390] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-1aa0063a-5c7a-4563-ac8f-6a5ce0daec0f req-4c8997ef-6d6a-4fd7-b403-4e2b1ca08ea7 service nova] Expecting reply to msg f5dec3e5bfa846d59d0ddf5929494412 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 736.055089] env[62096]: DEBUG nova.compute.utils [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 736.055648] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Expecting reply to msg a576d17e5644480c8fb566be61c9f2e8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 736.063540] env[62096]: DEBUG nova.compute.manager [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 321ab95b-6221-4bab-b442-a90926098dae] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 736.063717] env[62096]: DEBUG nova.network.neutron [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 321ab95b-6221-4bab-b442-a90926098dae] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 736.072187] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f5dec3e5bfa846d59d0ddf5929494412 [ 736.077947] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a576d17e5644480c8fb566be61c9f2e8 [ 736.158291] env[62096]: DEBUG nova.policy [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8fdb28d80ba747189147a109e83d6b8e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '962f0302df2a4282a6fadece663807fe', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 736.458407] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cda7f340-b40c-4b12-853f-fbc3e5e25cf4 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.465910] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4354a9d9-cdc6-48ee-898e-aefead383e81 {{(pid=62096) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.503006] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d7ffe5c-ce7b-4df1-be69-696dca6d0bd8 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.511688] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30270d6a-a692-4b16-bcaa-568eed7a6b92 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.526490] env[62096]: DEBUG nova.compute.provider_tree [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 736.527174] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Expecting reply to msg e4c0f08cb67b4b5c98e8179ca0fac5b2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 736.534474] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e4c0f08cb67b4b5c98e8179ca0fac5b2 [ 736.565078] env[62096]: DEBUG oslo_concurrency.lockutils [req-1aa0063a-5c7a-4563-ac8f-6a5ce0daec0f req-4c8997ef-6d6a-4fd7-b403-4e2b1ca08ea7 service nova] Releasing lock "refresh_cache-58df043b-ab2f-4e78-8bba-084fe53d3d8e" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 736.565798] env[62096]: DEBUG nova.compute.manager [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 321ab95b-6221-4bab-b442-a90926098dae] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 736.568527] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Expecting reply to msg 4d3afc8228de41fcafaf04baca44f4cd in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 736.570775] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Acquired lock "refresh_cache-58df043b-ab2f-4e78-8bba-084fe53d3d8e" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.571077] env[62096]: DEBUG nova.network.neutron [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 736.571561] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Expecting reply to msg 827c08def0ae44b7acdcbbda984b8b85 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 736.588260] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 827c08def0ae44b7acdcbbda984b8b85 [ 736.609746] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4d3afc8228de41fcafaf04baca44f4cd [ 736.804243] env[62096]: DEBUG nova.network.neutron [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 321ab95b-6221-4bab-b442-a90926098dae] Successfully created port: c0890e55-c21e-40bc-b223-5d06636aba10 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 737.029632] env[62096]: DEBUG nova.scheduler.client.report [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 737.031988] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Expecting reply to msg b250c04e50d6424387a8b0f4fa354988 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 737.044426] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b250c04e50d6424387a8b0f4fa354988 [ 737.076378] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 
tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Expecting reply to msg cb71c08b918b4cad8cb208257d226c31 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 737.111000] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cb71c08b918b4cad8cb208257d226c31 [ 737.117570] env[62096]: DEBUG nova.network.neutron [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 737.355860] env[62096]: DEBUG nova.network.neutron [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.356460] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Expecting reply to msg ae6282b008df49a6a50f5f466b373ba3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 737.369236] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ae6282b008df49a6a50f5f466b373ba3 [ 737.387533] env[62096]: DEBUG nova.compute.manager [req-faf17c46-3bab-48b8-b4d8-3fe04dfcc06b req-07067413-3ce6-4da7-b23e-1f512afac75c service nova] [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] Received event network-vif-deleted-e1e9548f-8e05-4826-85a0-6602fd03e638 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 737.540091] env[62096]: DEBUG oslo_concurrency.lockutils [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.990s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 737.540091] env[62096]: ERROR nova.compute.manager [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 3d1a8a96-d849-473c-8572-e26b0c89c85a, please check neutron logs for more information. 
[ 737.540091] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] Traceback (most recent call last): [ 737.540091] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 737.540091] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] self.driver.spawn(context, instance, image_meta, [ 737.540091] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 737.540091] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] self._vmops.spawn(context, instance, image_meta, injected_files, [ 737.540091] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 737.540091] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] vm_ref = self.build_virtual_machine(instance, [ 737.541507] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 737.541507] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] vif_infos = vmwarevif.get_vif_info(self._session, [ 737.541507] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 737.541507] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] for vif in network_info: [ 737.541507] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 737.541507] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] return self._sync_wrapper(fn, *args, **kwargs) [ 737.541507] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 737.541507] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] self.wait() [ 737.541507] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 737.541507] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] self[:] = self._gt.wait() [ 737.541507] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 737.541507] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] return self._exit_event.wait() [ 737.541507] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 737.542236] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] result = hub.switch() [ 737.542236] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
737.542236] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] return self.greenlet.switch() [ 737.542236] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 737.542236] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] result = function(*args, **kwargs) [ 737.542236] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 737.542236] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] return func(*args, **kwargs) [ 737.542236] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 737.542236] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] raise e [ 737.542236] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 737.542236] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] nwinfo = self.network_api.allocate_for_instance( [ 737.542236] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 737.542236] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] created_port_ids = self._update_ports_for_instance( [ 737.542698] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 737.542698] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] with excutils.save_and_reraise_exception(): [ 737.542698] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 737.542698] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] self.force_reraise() [ 737.542698] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 737.542698] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] raise self.value [ 737.542698] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 737.542698] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] updated_port = self._update_port( [ 737.542698] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 737.542698] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] _ensure_no_port_binding_failure(port) [ 737.542698] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 737.542698] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] raise exception.PortBindingFailed(port_id=port['id']) [ 737.543086] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] nova.exception.PortBindingFailed: Binding failed for port 3d1a8a96-d849-473c-8572-e26b0c89c85a, please check neutron logs for more information. [ 737.543086] env[62096]: ERROR nova.compute.manager [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] [ 737.543086] env[62096]: DEBUG nova.compute.utils [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] Binding failed for port 3d1a8a96-d849-473c-8572-e26b0c89c85a, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 737.543086] env[62096]: DEBUG oslo_concurrency.lockutils [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.974s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 737.543086] env[62096]: INFO nova.compute.claims [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 737.545404] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Expecting reply to msg 8729622c727349e093d10c81478eb1ce in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 737.552985] env[62096]: DEBUG nova.compute.manager [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] Build of instance 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe was re-scheduled: Binding failed for port 3d1a8a96-d849-473c-8572-e26b0c89c85a, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 737.552985] env[62096]: DEBUG nova.compute.manager [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 737.552985] env[62096]: DEBUG oslo_concurrency.lockutils [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Acquiring lock "refresh_cache-4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 737.552985] env[62096]: DEBUG oslo_concurrency.lockutils [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Acquired lock "refresh_cache-4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.553347] env[62096]: DEBUG nova.network.neutron [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 737.553347] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Expecting reply to msg f9e052ccedb5400db6c7bf48222469f6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 737.557945] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f9e052ccedb5400db6c7bf48222469f6 [ 737.580988] env[62096]: DEBUG nova.compute.manager [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 321ab95b-6221-4bab-b442-a90926098dae] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 737.594591] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8729622c727349e093d10c81478eb1ce [ 737.611852] env[62096]: DEBUG nova.virt.hardware [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 737.612376] env[62096]: DEBUG nova.virt.hardware [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 737.612569] env[62096]: DEBUG nova.virt.hardware [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 737.612761] env[62096]: DEBUG nova.virt.hardware [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 737.612910] env[62096]: DEBUG nova.virt.hardware [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 737.613058] env[62096]: DEBUG nova.virt.hardware [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 737.613272] env[62096]: DEBUG nova.virt.hardware [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 737.613432] env[62096]: DEBUG nova.virt.hardware [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 737.613949] env[62096]: DEBUG nova.virt.hardware [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 737.613949] env[62096]: DEBUG nova.virt.hardware [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 737.614073] env[62096]: DEBUG nova.virt.hardware [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 737.623321] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3270085-db78-4ea7-b312-fd5cdb714901 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.632578] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63f59ed7-c864-4b65-aa1e-f6c7b27f95f6 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.862023] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Releasing lock "refresh_cache-58df043b-ab2f-4e78-8bba-084fe53d3d8e" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 737.862023] env[62096]: DEBUG nova.compute.manager [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 737.862023] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 737.862023] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-33006082-af1d-4c4e-8867-72b0cdc18b4a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.872531] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2106fc3-7c23-425b-850a-0c2b026b5428 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.897590] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 58df043b-ab2f-4e78-8bba-084fe53d3d8e could not be found. [ 737.898237] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 737.898572] env[62096]: INFO nova.compute.manager [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] Took 0.04 seconds to destroy the instance on the hypervisor. [ 737.898962] env[62096]: DEBUG oslo.service.loopingcall [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 737.899330] env[62096]: DEBUG nova.compute.manager [-] [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 737.899538] env[62096]: DEBUG nova.network.neutron [-] [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 737.943209] env[62096]: DEBUG nova.network.neutron [-] [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 737.943209] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 983fd7cdd3604c30be84d02d67041e1c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 737.950195] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 983fd7cdd3604c30be84d02d67041e1c [ 738.052282] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Expecting reply to msg 098f5d7991cf41a9b98f5d1bcd26eb75 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 738.061246] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 098f5d7991cf41a9b98f5d1bcd26eb75 [ 738.095650] env[62096]: DEBUG nova.network.neutron [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 738.277646] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Acquiring lock "fcf84c67-fdea-41d7-aed9-690a45c97eaa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 738.277646] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Lock "fcf84c67-fdea-41d7-aed9-690a45c97eaa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 738.370451] env[62096]: DEBUG nova.network.neutron [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 738.370972] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Expecting reply to msg b9822dd6a1e44c609fcf59005c443587 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 738.384030] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b9822dd6a1e44c609fcf59005c443587 [ 738.422844] env[62096]: ERROR nova.compute.manager [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c0890e55-c21e-40bc-b223-5d06636aba10, please check neutron logs for more information. 
[ 738.422844] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 738.422844] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 738.422844] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 738.422844] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 738.422844] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 738.422844] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 738.422844] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 738.422844] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 738.422844] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 738.422844] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 738.422844] env[62096]: ERROR nova.compute.manager raise self.value [ 738.422844] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 738.422844] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 738.422844] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 738.422844] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 738.423353] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 738.423353] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 738.423353] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c0890e55-c21e-40bc-b223-5d06636aba10, please check neutron logs for more information. 
[ 738.423353] env[62096]: ERROR nova.compute.manager [ 738.423353] env[62096]: Traceback (most recent call last): [ 738.423353] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 738.423353] env[62096]: listener.cb(fileno) [ 738.423353] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 738.423353] env[62096]: result = function(*args, **kwargs) [ 738.423353] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 738.423353] env[62096]: return func(*args, **kwargs) [ 738.423353] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 738.423353] env[62096]: raise e [ 738.423353] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 738.423353] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 738.423353] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 738.423353] env[62096]: created_port_ids = self._update_ports_for_instance( [ 738.423353] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 738.423353] env[62096]: with excutils.save_and_reraise_exception(): [ 738.423353] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 738.423353] env[62096]: self.force_reraise() [ 738.423353] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 738.423353] env[62096]: raise self.value [ 738.423353] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 738.423353] env[62096]: updated_port = self._update_port( [ 738.423353] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 738.423353] env[62096]: _ensure_no_port_binding_failure(port) [ 738.423353] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 738.423353] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 738.424253] env[62096]: nova.exception.PortBindingFailed: Binding failed for port c0890e55-c21e-40bc-b223-5d06636aba10, please check neutron logs for more information. [ 738.424253] env[62096]: Removing descriptor: 16 [ 738.424253] env[62096]: ERROR nova.compute.manager [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 321ab95b-6221-4bab-b442-a90926098dae] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c0890e55-c21e-40bc-b223-5d06636aba10, please check neutron logs for more information. 
[ 738.424253] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] Traceback (most recent call last): [ 738.424253] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 738.424253] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] yield resources [ 738.424253] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 738.424253] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] self.driver.spawn(context, instance, image_meta, [ 738.424253] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 738.424253] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] self._vmops.spawn(context, instance, image_meta, injected_files, [ 738.424253] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 738.424253] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] vm_ref = self.build_virtual_machine(instance, [ 738.424615] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 738.424615] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] vif_infos = vmwarevif.get_vif_info(self._session, [ 738.424615] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 738.424615] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] for vif in network_info: [ 738.424615] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 738.424615] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] return self._sync_wrapper(fn, *args, **kwargs) [ 738.424615] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 738.424615] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] self.wait() [ 738.424615] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 738.424615] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] self[:] = self._gt.wait() [ 738.424615] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 738.424615] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] return self._exit_event.wait() [ 738.424615] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 738.425071] env[62096]: ERROR 
nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] result = hub.switch() [ 738.425071] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 738.425071] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] return self.greenlet.switch() [ 738.425071] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 738.425071] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] result = function(*args, **kwargs) [ 738.425071] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 738.425071] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] return func(*args, **kwargs) [ 738.425071] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 738.425071] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] raise e [ 738.425071] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 738.425071] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] nwinfo = self.network_api.allocate_for_instance( [ 738.425071] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 738.425071] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] created_port_ids = self._update_ports_for_instance( [ 738.425460] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 738.425460] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] with excutils.save_and_reraise_exception(): [ 738.425460] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 738.425460] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] self.force_reraise() [ 738.425460] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 738.425460] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] raise self.value [ 738.425460] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 738.425460] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] updated_port = self._update_port( [ 738.425460] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 738.425460] 
env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] _ensure_no_port_binding_failure(port) [ 738.425460] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 738.425460] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] raise exception.PortBindingFailed(port_id=port['id']) [ 738.425810] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] nova.exception.PortBindingFailed: Binding failed for port c0890e55-c21e-40bc-b223-5d06636aba10, please check neutron logs for more information. [ 738.425810] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] [ 738.425810] env[62096]: INFO nova.compute.manager [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 321ab95b-6221-4bab-b442-a90926098dae] Terminating instance [ 738.428493] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Acquiring lock "refresh_cache-321ab95b-6221-4bab-b442-a90926098dae" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 738.428655] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Acquired lock "refresh_cache-321ab95b-6221-4bab-b442-a90926098dae" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.428821] env[62096]: DEBUG nova.network.neutron [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 321ab95b-6221-4bab-b442-a90926098dae] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 738.429246] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Expecting reply to msg a823b18ab22b47e38120066059900902 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 738.440609] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a823b18ab22b47e38120066059900902 [ 738.444933] env[62096]: DEBUG nova.network.neutron [-] [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 738.445357] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 4c6583dec99b4524aa46fd3c984aa53c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 738.481835] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4c6583dec99b4524aa46fd3c984aa53c [ 738.873894] env[62096]: DEBUG oslo_concurrency.lockutils [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Releasing lock "refresh_cache-4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe" {{(pid=62096) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 738.874352] env[62096]: DEBUG nova.compute.manager [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 738.874409] env[62096]: DEBUG nova.compute.manager [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 738.874608] env[62096]: DEBUG nova.network.neutron [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 738.893950] env[62096]: DEBUG nova.network.neutron [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 738.894728] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Expecting reply to msg 672e8c56c6b44b75aa5f53803e493ca4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 738.905592] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 672e8c56c6b44b75aa5f53803e493ca4 [ 738.906460] env[62096]: DEBUG nova.network.neutron [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 738.907257] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Expecting reply to msg c9169229b354437a9af15df5828bab3d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 738.916448] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c9169229b354437a9af15df5828bab3d [ 738.960050] env[62096]: INFO nova.compute.manager [-] [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] Took 1.05 seconds to deallocate network for instance. [ 738.964837] env[62096]: DEBUG nova.network.neutron [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 321ab95b-6221-4bab-b442-a90926098dae] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 738.969853] env[62096]: DEBUG nova.compute.claims [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 738.970037] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 739.015030] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21bb71cd-3104-40fa-8964-eff364f808b0 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.022687] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66ff38fd-4618-4906-a10f-9a09b34b5000 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.061710] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8483ed89-fe2b-4ece-b99d-a98ea418d1c8 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.071835] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08151dba-5ec2-4da9-b2ed-644ada2e4300 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.088388] env[62096]: DEBUG nova.compute.provider_tree [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 739.088895] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Expecting reply to msg d5d337cf535a45e7ba6ceda3573ddfeb in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 739.097280] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d5d337cf535a45e7ba6ceda3573ddfeb [ 739.100342] env[62096]: DEBUG nova.network.neutron [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 321ab95b-6221-4bab-b442-a90926098dae] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 739.100806] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Expecting reply to msg d965e2998a7f4f649e3b70588a3d5276 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 739.110111] env[62096]: 
INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d965e2998a7f4f649e3b70588a3d5276 [ 739.409488] env[62096]: INFO nova.compute.manager [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] [instance: 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe] Took 0.53 seconds to deallocate network for instance. [ 739.411246] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Expecting reply to msg 8861e45996844991920ecbbf2eac37d3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 739.431237] env[62096]: DEBUG nova.compute.manager [req-b32846a1-b3bc-490a-98d7-323e61861a88 req-8b4b35aa-ec28-4308-a7c1-35996376805b service nova] [instance: 321ab95b-6221-4bab-b442-a90926098dae] Received event network-changed-c0890e55-c21e-40bc-b223-5d06636aba10 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 739.431426] env[62096]: DEBUG nova.compute.manager [req-b32846a1-b3bc-490a-98d7-323e61861a88 req-8b4b35aa-ec28-4308-a7c1-35996376805b service nova] [instance: 321ab95b-6221-4bab-b442-a90926098dae] Refreshing instance network info cache due to event network-changed-c0890e55-c21e-40bc-b223-5d06636aba10. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 739.431612] env[62096]: DEBUG oslo_concurrency.lockutils [req-b32846a1-b3bc-490a-98d7-323e61861a88 req-8b4b35aa-ec28-4308-a7c1-35996376805b service nova] Acquiring lock "refresh_cache-321ab95b-6221-4bab-b442-a90926098dae" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 739.450432] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8861e45996844991920ecbbf2eac37d3 [ 739.591909] env[62096]: DEBUG nova.scheduler.client.report [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 739.594352] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Expecting reply to msg 6528a7873cc64939b9448c175391dffc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 739.608110] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Releasing lock "refresh_cache-321ab95b-6221-4bab-b442-a90926098dae" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 739.608110] env[62096]: DEBUG nova.compute.manager [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 
tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 321ab95b-6221-4bab-b442-a90926098dae] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 739.608110] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 321ab95b-6221-4bab-b442-a90926098dae] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 739.608110] env[62096]: DEBUG oslo_concurrency.lockutils [req-b32846a1-b3bc-490a-98d7-323e61861a88 req-8b4b35aa-ec28-4308-a7c1-35996376805b service nova] Acquired lock "refresh_cache-321ab95b-6221-4bab-b442-a90926098dae" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.608110] env[62096]: DEBUG nova.network.neutron [req-b32846a1-b3bc-490a-98d7-323e61861a88 req-8b4b35aa-ec28-4308-a7c1-35996376805b service nova] [instance: 321ab95b-6221-4bab-b442-a90926098dae] Refreshing network info cache for port c0890e55-c21e-40bc-b223-5d06636aba10 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 739.608544] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-b32846a1-b3bc-490a-98d7-323e61861a88 req-8b4b35aa-ec28-4308-a7c1-35996376805b service nova] Expecting reply to msg c2cca6b1ee514048a9b2ec8aad7441fc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 739.608544] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6acfc667-5f71-47b8-a279-0389c26a5d8a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.608544] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6528a7873cc64939b9448c175391dffc [ 739.610900] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c2cca6b1ee514048a9b2ec8aad7441fc [ 739.615928] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91ba79dc-16f5-4e08-82ab-c2d78de63aaf {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.638637] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 321ab95b-6221-4bab-b442-a90926098dae] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 321ab95b-6221-4bab-b442-a90926098dae could not be found. [ 739.638829] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 321ab95b-6221-4bab-b442-a90926098dae] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 739.639062] env[62096]: INFO nova.compute.manager [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 321ab95b-6221-4bab-b442-a90926098dae] Took 0.04 seconds to destroy the instance on the hypervisor. 
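The tracebacks above all funnel through the same guard: `_update_port()` calls `_ensure_no_port_binding_failure(port)`, which raises `nova.exception.PortBindingFailed` and sends the build down the re-schedule / terminate / deallocate path logged here. Below is a minimal, self-contained sketch of that guard. Only the function and exception names and the error message are taken from the traceback; the `binding:vif_type == 'binding_failed'` check is an assumption about how Neutron reports a binding that its L2 agent could not complete.

```python
# Minimal sketch (not Nova source) of the guard pattern visible in the
# tracebacks above. The 'binding:vif_type' comparison is an assumption;
# the names and the message text mirror what the log shows.

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs "
            "for more information." % port_id)
        self.port_id = port_id


VIF_TYPE_BINDING_FAILED = 'binding_failed'   # assumed sentinel value


def _ensure_no_port_binding_failure(port):
    # Neutron returns the updated port normally even when binding failed,
    # so the failure has to be detected from the port body itself.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])


if __name__ == '__main__':
    ok = {'id': 'p1', 'binding:vif_type': 'vmware_dvs'}
    bad = {'id': 'c0890e55-c21e-40bc-b223-5d06636aba10',
           'binding:vif_type': VIF_TYPE_BINDING_FAILED}
    _ensure_no_port_binding_failure(ok)      # passes silently
    try:
        _ensure_no_port_binding_failure(bad)
    except PortBindingFailed as exc:
        print(exc)                           # same message as in the log
```

When this guard fires during spawn, the compute manager follows the sequence visible in the surrounding records: terminate the instance, destroy it on the hypervisor (tolerating InstanceNotFound, since nothing was ever created in vCenter), deallocate the network, and abort the resource claim.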
[ 739.639266] env[62096]: DEBUG oslo.service.loopingcall [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 739.639459] env[62096]: DEBUG nova.compute.manager [-] [instance: 321ab95b-6221-4bab-b442-a90926098dae] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 739.639532] env[62096]: DEBUG nova.network.neutron [-] [instance: 321ab95b-6221-4bab-b442-a90926098dae] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 739.654901] env[62096]: DEBUG nova.network.neutron [-] [instance: 321ab95b-6221-4bab-b442-a90926098dae] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 739.654901] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 9133bccecac54283bb5ba75934916e42 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 739.664402] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9133bccecac54283bb5ba75934916e42 [ 739.917608] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Expecting reply to msg f724744888ab4313bf8a129aaa38d661 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 739.948318] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f724744888ab4313bf8a129aaa38d661 [ 740.101700] env[62096]: DEBUG oslo_concurrency.lockutils [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.560s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 740.101700] env[62096]: DEBUG nova.compute.manager [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 740.101700] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Expecting reply to msg 994d8445dfd2418a817dbc7ef8e66385 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 740.101700] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.656s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 740.103548] env[62096]: INFO nova.compute.claims [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 740.105322] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Expecting reply to msg a910eea1201f40ecb7690369bac62c69 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 740.150222] env[62096]: DEBUG nova.network.neutron [req-b32846a1-b3bc-490a-98d7-323e61861a88 req-8b4b35aa-ec28-4308-a7c1-35996376805b service nova] [instance: 321ab95b-6221-4bab-b442-a90926098dae] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 740.152320] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 994d8445dfd2418a817dbc7ef8e66385 [ 740.161431] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a910eea1201f40ecb7690369bac62c69 [ 740.161809] env[62096]: DEBUG nova.network.neutron [-] [instance: 321ab95b-6221-4bab-b442-a90926098dae] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.162252] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 2ba931f71d2e416e9d958580525bd894 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 740.172948] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2ba931f71d2e416e9d958580525bd894 [ 740.271009] env[62096]: DEBUG nova.network.neutron [req-b32846a1-b3bc-490a-98d7-323e61861a88 req-8b4b35aa-ec28-4308-a7c1-35996376805b service nova] [instance: 321ab95b-6221-4bab-b442-a90926098dae] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.271009] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-b32846a1-b3bc-490a-98d7-323e61861a88 req-8b4b35aa-ec28-4308-a7c1-35996376805b service nova] Expecting reply to msg 97275ca482084c6991273a5336ca2048 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 740.278486] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 97275ca482084c6991273a5336ca2048 [ 740.449387] env[62096]: INFO nova.scheduler.client.report [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab 
tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Deleted allocations for instance 4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe [ 740.455371] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Expecting reply to msg f260634cb7ac455283d51a68b524c097 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 740.469839] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f260634cb7ac455283d51a68b524c097 [ 740.608609] env[62096]: DEBUG nova.compute.utils [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 740.609243] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Expecting reply to msg 8564cce635994b388cf80310f40000ce in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 740.614142] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Expecting reply to msg 77a3e0e6cc8b4c5bb499c523edc5bc7f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 740.614142] env[62096]: DEBUG nova.compute.manager [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 740.614142] env[62096]: DEBUG nova.network.neutron [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 740.618521] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 77a3e0e6cc8b4c5bb499c523edc5bc7f [ 740.619720] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8564cce635994b388cf80310f40000ce [ 740.663992] env[62096]: INFO nova.compute.manager [-] [instance: 321ab95b-6221-4bab-b442-a90926098dae] Took 1.02 seconds to deallocate network for instance. 
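The resource-tracker records above ("Claim successful on node domain-c8...", "Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3...") are driven by the inventory dictionary reported to Placement. The short sketch below works out what that inventory implies for schedulable capacity, assuming the usual Placement convention that usable capacity is (total - reserved) * allocation_ratio, with each individual allocation further bounded by min_unit/max_unit/step_size; the numbers are copied from the inventory logged for this provider.

```python
# Capacity implied by the inventory logged for provider
# 6eefe13c-ab55-4c03-987f-47a62756c3b3, assuming the Placement
# formula capacity = (total - reserved) * allocation_ratio.

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0,
                  'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0,
                  'max_unit': 65530},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0,
                  'max_unit': 126},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: capacity={capacity:g}, "
          f"largest single allocation={inv['max_unit']}")

# VCPU: capacity=192, largest single allocation=16
# MEMORY_MB: capacity=196078, largest single allocation=65530
# DISK_GB: capacity=400, largest single allocation=126
```

With CPU oversubscribed 4:1 and memory at 1:1, the tiny m1.nano claims seen here (1 vCPU, 192 MB, 1 GB disk) fit easily; what actually paces them is the "compute_resources" lock, which serializes each instance_claim and accounts for the multi-second waits logged above.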
[ 740.666441] env[62096]: DEBUG nova.compute.claims [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 321ab95b-6221-4bab-b442-a90926098dae] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 740.666591] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 740.777923] env[62096]: DEBUG oslo_concurrency.lockutils [req-b32846a1-b3bc-490a-98d7-323e61861a88 req-8b4b35aa-ec28-4308-a7c1-35996376805b service nova] Releasing lock "refresh_cache-321ab95b-6221-4bab-b442-a90926098dae" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 740.777923] env[62096]: DEBUG nova.compute.manager [req-b32846a1-b3bc-490a-98d7-323e61861a88 req-8b4b35aa-ec28-4308-a7c1-35996376805b service nova] [instance: 321ab95b-6221-4bab-b442-a90926098dae] Received event network-vif-deleted-c0890e55-c21e-40bc-b223-5d06636aba10 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 740.848258] env[62096]: DEBUG nova.policy [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c0c419bc7a8348759d89df208c57ff0e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f94e0e11282d486b9689b8077a550fc0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 740.957784] env[62096]: DEBUG oslo_concurrency.lockutils [None req-6d518a50-dadb-4b6d-9314-0f65d9990dab tempest-ServersNegativeTestMultiTenantJSON-1390208586 tempest-ServersNegativeTestMultiTenantJSON-1390208586-project-member] Lock "4aa9ddb6-f57a-4a61-acfb-38e8f9d879fe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 145.352s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 740.958424] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Expecting reply to msg e0aa14cc474e4e408235293831c3c570 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 740.969767] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e0aa14cc474e4e408235293831c3c570 [ 741.113717] env[62096]: DEBUG nova.compute.manager [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 741.116174] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Expecting reply to msg 03dac71fa877487c82784dee0f3a289f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 741.162915] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 03dac71fa877487c82784dee0f3a289f [ 741.379252] env[62096]: DEBUG nova.network.neutron [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] Successfully created port: 479cc0d6-65e1-4c95-a612-c8b87a7f1f27 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 741.460198] env[62096]: DEBUG nova.compute.manager [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 741.462018] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Expecting reply to msg 19bac20805034828933f75cc4ab95900 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 741.496843] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 19bac20805034828933f75cc4ab95900 [ 741.525235] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f82089e-e9ff-4b52-ace3-b7b1eb541403 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.533165] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fea86019-3198-4ba2-800e-8a7349bbac08 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.562637] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aad7192-f0cd-471b-b174-806a95f3613a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.570835] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af2e0e9e-fa83-4dae-88d4-b6033c1bf73a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.584347] env[62096]: DEBUG nova.compute.provider_tree [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 741.584922] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Expecting reply to msg 19f7d14c61d84659a9d37d10b2acb8fc in queue 
reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 741.594459] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 19f7d14c61d84659a9d37d10b2acb8fc [ 741.623138] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Expecting reply to msg 1b97bb4c1c994308b46687cbc9628e07 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 741.658516] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1b97bb4c1c994308b46687cbc9628e07 [ 741.670395] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Acquiring lock "6cc2a2c1-613d-40bd-a375-424b84b66ac9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 741.670663] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Lock "6cc2a2c1-613d-40bd-a375-424b84b66ac9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 741.982994] env[62096]: DEBUG oslo_concurrency.lockutils [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 742.088310] env[62096]: DEBUG nova.scheduler.client.report [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 742.090752] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Expecting reply to msg 26b5ddfc4cb443a0adf8314dfd2eb00d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 742.106607] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 26b5ddfc4cb443a0adf8314dfd2eb00d [ 742.126217] env[62096]: DEBUG nova.compute.manager [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 742.154398] env[62096]: DEBUG nova.virt.hardware [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 742.154653] env[62096]: DEBUG nova.virt.hardware [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 742.154813] env[62096]: DEBUG nova.virt.hardware [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 742.154996] env[62096]: DEBUG nova.virt.hardware [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 742.155140] env[62096]: DEBUG nova.virt.hardware [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 742.155283] env[62096]: DEBUG nova.virt.hardware [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 742.155487] env[62096]: DEBUG nova.virt.hardware [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 742.155641] env[62096]: DEBUG nova.virt.hardware [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 742.155804] env[62096]: DEBUG nova.virt.hardware [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 742.155965] env[62096]: DEBUG nova.virt.hardware [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 742.156157] env[62096]: DEBUG nova.virt.hardware [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 742.157005] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13d2009c-eb5c-4f8b-a36b-264f1f2ab893 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.164795] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f8dc60c-93a8-47ee-b679-3496b0f3cd10 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.285750] env[62096]: DEBUG nova.compute.manager [req-c027996a-c621-4865-b817-5bb8a9f732f2 req-4b7cfc25-e67f-40db-82fe-dd70bf002841 service nova] [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] Received event network-changed-479cc0d6-65e1-4c95-a612-c8b87a7f1f27 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 742.286023] env[62096]: DEBUG nova.compute.manager [req-c027996a-c621-4865-b817-5bb8a9f732f2 req-4b7cfc25-e67f-40db-82fe-dd70bf002841 service nova] [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] Refreshing instance network info cache due to event network-changed-479cc0d6-65e1-4c95-a612-c8b87a7f1f27. 
{{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 742.286298] env[62096]: DEBUG oslo_concurrency.lockutils [req-c027996a-c621-4865-b817-5bb8a9f732f2 req-4b7cfc25-e67f-40db-82fe-dd70bf002841 service nova] Acquiring lock "refresh_cache-59b37648-7b23-4ae3-90e6-867fbbde25df" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 742.286496] env[62096]: DEBUG oslo_concurrency.lockutils [req-c027996a-c621-4865-b817-5bb8a9f732f2 req-4b7cfc25-e67f-40db-82fe-dd70bf002841 service nova] Acquired lock "refresh_cache-59b37648-7b23-4ae3-90e6-867fbbde25df" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 742.286727] env[62096]: DEBUG nova.network.neutron [req-c027996a-c621-4865-b817-5bb8a9f732f2 req-4b7cfc25-e67f-40db-82fe-dd70bf002841 service nova] [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] Refreshing network info cache for port 479cc0d6-65e1-4c95-a612-c8b87a7f1f27 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 742.287331] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-c027996a-c621-4865-b817-5bb8a9f732f2 req-4b7cfc25-e67f-40db-82fe-dd70bf002841 service nova] Expecting reply to msg eb445c2029914bbdb1c7d9315c610890 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 742.296360] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eb445c2029914bbdb1c7d9315c610890 [ 742.572431] env[62096]: ERROR nova.compute.manager [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 479cc0d6-65e1-4c95-a612-c8b87a7f1f27, please check neutron logs for more information. 
[ 742.572431] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 742.572431] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 742.572431] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 742.572431] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 742.572431] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 742.572431] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 742.572431] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 742.572431] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 742.572431] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 742.572431] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 742.572431] env[62096]: ERROR nova.compute.manager raise self.value [ 742.572431] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 742.572431] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 742.572431] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 742.572431] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 742.573026] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 742.573026] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 742.573026] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 479cc0d6-65e1-4c95-a612-c8b87a7f1f27, please check neutron logs for more information. 
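Editorial note, appended between the tracebacks for context: the frames above end in nova/network/neutron.py's _ensure_no_port_binding_failure() raising PortBindingFailed(port_id=port['id']) for port 479cc0d6-65e1-4c95-a612-c8b87a7f1f27. The log shows only the function name and the exception it raises, so the following is a minimal standalone sketch of that kind of check, not Nova's verbatim source; the 'binding:vif_type' == 'binding_failed' condition is an assumption based on the usual Neutron port-binding convention.

    # Illustrative sketch only -- not the Nova source. The traceback above
    # shows _ensure_no_port_binding_failure() raising PortBindingFailed;
    # the 'binding:vif_type' == 'binding_failed' test is an assumed convention.

    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed."""

        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)
            self.port_id = port_id


    def ensure_no_port_binding_failure(port):
        # Neutron reports a failed binding through the port's
        # 'binding:vif_type' attribute (assumed convention).
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])


    if __name__ == '__main__':
        failed_port = {
            'id': '479cc0d6-65e1-4c95-a612-c8b87a7f1f27',
            'binding:vif_type': 'binding_failed',
        }
        try:
            ensure_no_port_binding_failure(failed_port)
        except PortBindingFailed as exc:
            print(exc)  # mirrors the message seen in the traceback above

The log then continues with the re-raised exception propagating through the greenthread that ran _allocate_network_async: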
[ 742.573026] env[62096]: ERROR nova.compute.manager [ 742.573026] env[62096]: Traceback (most recent call last): [ 742.573026] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 742.573026] env[62096]: listener.cb(fileno) [ 742.573026] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 742.573026] env[62096]: result = function(*args, **kwargs) [ 742.573026] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 742.573026] env[62096]: return func(*args, **kwargs) [ 742.573026] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 742.573026] env[62096]: raise e [ 742.573026] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 742.573026] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 742.573026] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 742.573026] env[62096]: created_port_ids = self._update_ports_for_instance( [ 742.573026] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 742.573026] env[62096]: with excutils.save_and_reraise_exception(): [ 742.573026] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 742.573026] env[62096]: self.force_reraise() [ 742.573026] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 742.573026] env[62096]: raise self.value [ 742.573026] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 742.573026] env[62096]: updated_port = self._update_port( [ 742.573026] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 742.573026] env[62096]: _ensure_no_port_binding_failure(port) [ 742.573026] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 742.573026] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 742.574201] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 479cc0d6-65e1-4c95-a612-c8b87a7f1f27, please check neutron logs for more information. [ 742.574201] env[62096]: Removing descriptor: 16 [ 742.574201] env[62096]: ERROR nova.compute.manager [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 479cc0d6-65e1-4c95-a612-c8b87a7f1f27, please check neutron logs for more information. 
[ 742.574201] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] Traceback (most recent call last): [ 742.574201] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 742.574201] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] yield resources [ 742.574201] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 742.574201] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] self.driver.spawn(context, instance, image_meta, [ 742.574201] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 742.574201] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] self._vmops.spawn(context, instance, image_meta, injected_files, [ 742.574201] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 742.574201] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] vm_ref = self.build_virtual_machine(instance, [ 742.574639] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 742.574639] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] vif_infos = vmwarevif.get_vif_info(self._session, [ 742.574639] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 742.574639] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] for vif in network_info: [ 742.574639] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 742.574639] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] return self._sync_wrapper(fn, *args, **kwargs) [ 742.574639] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 742.574639] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] self.wait() [ 742.574639] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 742.574639] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] self[:] = self._gt.wait() [ 742.574639] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 742.574639] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] return self._exit_event.wait() [ 742.574639] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 742.575084] env[62096]: ERROR 
nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] result = hub.switch() [ 742.575084] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 742.575084] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] return self.greenlet.switch() [ 742.575084] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 742.575084] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] result = function(*args, **kwargs) [ 742.575084] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 742.575084] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] return func(*args, **kwargs) [ 742.575084] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 742.575084] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] raise e [ 742.575084] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 742.575084] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] nwinfo = self.network_api.allocate_for_instance( [ 742.575084] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 742.575084] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] created_port_ids = self._update_ports_for_instance( [ 742.575572] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 742.575572] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] with excutils.save_and_reraise_exception(): [ 742.575572] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 742.575572] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] self.force_reraise() [ 742.575572] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 742.575572] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] raise self.value [ 742.575572] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 742.575572] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] updated_port = self._update_port( [ 742.575572] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 742.575572] 
env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] _ensure_no_port_binding_failure(port) [ 742.575572] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 742.575572] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] raise exception.PortBindingFailed(port_id=port['id']) [ 742.575983] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] nova.exception.PortBindingFailed: Binding failed for port 479cc0d6-65e1-4c95-a612-c8b87a7f1f27, please check neutron logs for more information. [ 742.575983] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] [ 742.575983] env[62096]: INFO nova.compute.manager [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] Terminating instance [ 742.575983] env[62096]: DEBUG oslo_concurrency.lockutils [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Acquiring lock "refresh_cache-59b37648-7b23-4ae3-90e6-867fbbde25df" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 742.593501] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.492s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 742.594014] env[62096]: DEBUG nova.compute.manager [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 742.603741] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Expecting reply to msg 871e38a1a3304b6fb8ed4b7bb91316e1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 742.608038] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.252s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 742.608038] env[62096]: INFO nova.compute.claims [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 742.608729] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 26d611a9f0514cc699cb04c2e324ca02 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 742.642627] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 871e38a1a3304b6fb8ed4b7bb91316e1 [ 742.649663] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 26d611a9f0514cc699cb04c2e324ca02 [ 742.819579] env[62096]: DEBUG nova.network.neutron [req-c027996a-c621-4865-b817-5bb8a9f732f2 req-4b7cfc25-e67f-40db-82fe-dd70bf002841 service nova] [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 742.975035] env[62096]: DEBUG nova.network.neutron [req-c027996a-c621-4865-b817-5bb8a9f732f2 req-4b7cfc25-e67f-40db-82fe-dd70bf002841 service nova] [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.976199] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-c027996a-c621-4865-b817-5bb8a9f732f2 req-4b7cfc25-e67f-40db-82fe-dd70bf002841 service nova] Expecting reply to msg 3e2e03388f1c49b792a2202b25d3dea4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 742.984197] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e2e03388f1c49b792a2202b25d3dea4 [ 743.112529] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg c11da2e85a804a549943c59720dd84cf in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 743.114383] env[62096]: DEBUG nova.compute.utils [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 743.114973] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Expecting reply to msg fc1032c114554a2aa316414fad091fb5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 743.116149] env[62096]: DEBUG nova.compute.manager [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 743.116331] env[62096]: DEBUG nova.network.neutron [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 743.122016] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c11da2e85a804a549943c59720dd84cf [ 743.123808] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fc1032c114554a2aa316414fad091fb5 [ 743.198987] env[62096]: DEBUG nova.policy [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c5998dd882ee44c782c57eaa03adeb45', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '74b56c4e7fd64242add8635933a63006', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 743.478172] env[62096]: DEBUG oslo_concurrency.lockutils [req-c027996a-c621-4865-b817-5bb8a9f732f2 req-4b7cfc25-e67f-40db-82fe-dd70bf002841 service nova] Releasing lock "refresh_cache-59b37648-7b23-4ae3-90e6-867fbbde25df" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 743.478589] env[62096]: DEBUG oslo_concurrency.lockutils [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Acquired lock "refresh_cache-59b37648-7b23-4ae3-90e6-867fbbde25df" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 743.478771] env[62096]: DEBUG nova.network.neutron [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 743.479241] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Expecting reply to msg 6da1877d09fe45dcacd650f36ed51273 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 743.487078] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6da1877d09fe45dcacd650f36ed51273 [ 743.619527] env[62096]: DEBUG nova.compute.manager [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 743.621317] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Expecting reply to msg 8a1870f2b36a433e841b0fe53ac9c584 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 743.667360] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8a1870f2b36a433e841b0fe53ac9c584 [ 743.799556] env[62096]: DEBUG nova.network.neutron [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] Successfully created port: 64fe9acd-2f73-403b-b490-b6600bb258e3 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 744.017055] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99768fc4-3f61-4c9e-829f-960d42f785dd {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.025689] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36729571-dfad-48de-b79a-919da5eb02f9 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.055707] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3549a33a-88b7-4f3a-8ae5-022997f6b97c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.062728] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d70ca063-d43a-4bef-8326-e8b36cc46f70 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.075377] env[62096]: DEBUG nova.compute.provider_tree [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 744.075863] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 5f7e0abda6184d8c821823a98a662d84 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 744.082963] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5f7e0abda6184d8c821823a98a662d84 [ 744.108418] env[62096]: DEBUG nova.network.neutron [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 744.127568] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Expecting reply to msg 796fcbb1b4984ee6a96b7faf1c23e8a4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 744.166387] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 796fcbb1b4984ee6a96b7faf1c23e8a4 [ 744.222334] env[62096]: DEBUG nova.network.neutron [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 744.222852] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Expecting reply to msg 5c7c14db203548a882edb586eb87d84d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 744.231132] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5c7c14db203548a882edb586eb87d84d [ 744.571139] env[62096]: DEBUG nova.compute.manager [req-08cf5546-1bf6-495e-a269-0ce2262e97fb req-c499831e-674c-48d5-8f96-11f10e80208b service nova] [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] Received event network-vif-deleted-479cc0d6-65e1-4c95-a612-c8b87a7f1f27 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 744.578668] env[62096]: DEBUG nova.scheduler.client.report [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 744.581025] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 52056bb7f16a4e79afe8daee1ded7ae8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 744.594151] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 52056bb7f16a4e79afe8daee1ded7ae8 [ 744.636942] env[62096]: DEBUG nova.compute.manager [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 744.661779] env[62096]: DEBUG nova.virt.hardware [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:55:23Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='2056001158',id=36,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-2133140877',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 744.662044] env[62096]: DEBUG nova.virt.hardware [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 744.662199] env[62096]: DEBUG nova.virt.hardware [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 744.662377] env[62096]: DEBUG nova.virt.hardware [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 744.662523] env[62096]: DEBUG nova.virt.hardware [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 744.662668] env[62096]: DEBUG nova.virt.hardware [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 744.662868] env[62096]: DEBUG nova.virt.hardware [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 744.663023] env[62096]: DEBUG nova.virt.hardware [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 
tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 744.663189] env[62096]: DEBUG nova.virt.hardware [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 744.663346] env[62096]: DEBUG nova.virt.hardware [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 744.663514] env[62096]: DEBUG nova.virt.hardware [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 744.664687] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08ebc46d-5822-423b-8b12-09a32bbe40f4 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.672794] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-675531d9-d32f-44c5-b171-1e093319036d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.724697] env[62096]: DEBUG oslo_concurrency.lockutils [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Releasing lock "refresh_cache-59b37648-7b23-4ae3-90e6-867fbbde25df" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 744.725160] env[62096]: DEBUG nova.compute.manager [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 744.725355] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 744.725659] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c53b85dc-01b9-49f1-8437-3821f8964884 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.733978] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae1a34ac-bb0b-420a-b9dc-84f068b2ec12 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.763375] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 59b37648-7b23-4ae3-90e6-867fbbde25df could not be found. [ 744.763764] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 744.764062] env[62096]: INFO nova.compute.manager [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] Took 0.04 seconds to destroy the instance on the hypervisor. [ 744.764416] env[62096]: DEBUG oslo.service.loopingcall [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 744.764727] env[62096]: DEBUG nova.compute.manager [-] [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 744.764862] env[62096]: DEBUG nova.network.neutron [-] [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 744.803449] env[62096]: DEBUG nova.network.neutron [-] [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 744.804289] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c2645f53c8e5433c8edb97b2d0c69530 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 744.811544] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c2645f53c8e5433c8edb97b2d0c69530 [ 745.092601] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.479s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 745.092601] env[62096]: DEBUG nova.compute.manager [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] Start building networks asynchronously for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 745.092601] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 73cde8ea53b74408a076c2be796083e5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 745.092601] env[62096]: DEBUG oslo_concurrency.lockutils [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.385s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 745.092601] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Expecting reply to msg d77177bbf177426faac884176d83cdfc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 745.122881] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 73cde8ea53b74408a076c2be796083e5 [ 745.124661] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d77177bbf177426faac884176d83cdfc [ 745.307395] env[62096]: DEBUG nova.network.neutron [-] [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 745.307874] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 2ceb543bfea24c50897194f00df5922a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 745.317396] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2ceb543bfea24c50897194f00df5922a [ 745.597728] env[62096]: DEBUG nova.compute.utils [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 745.597728] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] 
Expecting reply to msg 036321113e6245baa5c46189789488b4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 745.597728] env[62096]: DEBUG nova.compute.manager [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 745.597728] env[62096]: DEBUG nova.network.neutron [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 745.609219] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 036321113e6245baa5c46189789488b4 [ 745.625953] env[62096]: ERROR nova.compute.manager [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 64fe9acd-2f73-403b-b490-b6600bb258e3, please check neutron logs for more information. [ 745.625953] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 745.625953] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 745.625953] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 745.625953] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 745.625953] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 745.625953] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 745.625953] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 745.625953] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 745.625953] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 745.625953] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 745.625953] env[62096]: ERROR nova.compute.manager raise self.value [ 745.625953] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 745.625953] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 745.625953] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 745.625953] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 745.626506] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 745.626506] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 745.626506] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 64fe9acd-2f73-403b-b490-b6600bb258e3, please check neutron logs for more information. 
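Editorial note: both failure dumps in this excerpt pass through oslo_utils.excutils.save_and_reraise_exception() — that is what produces the __exit__, force_reraise() and "raise self.value" frames. Below is a minimal usage sketch of that context manager as the frames imply it is used; _roll_back_port_allocation() is a hypothetical placeholder, not Nova code, and the snippet assumes the oslo.utils package is installed.

    # Minimal sketch of the oslo.utils pattern visible in the frames above:
    # save_and_reraise_exception() runs cleanup inside the 'with' block and
    # then re-raises the original exception via force_reraise().

    from oslo_utils import excutils


    class PortBindingFailed(Exception):
        pass


    def _roll_back_port_allocation():
        # Hypothetical cleanup stand-in.
        print("rolling back partially created ports")


    def update_ports_for_instance():
        try:
            raise PortBindingFailed(
                "Binding failed for port "
                "64fe9acd-2f73-403b-b490-b6600bb258e3")
        except Exception:
            # Clean up, then let the original exception propagate unchanged;
            # this is what yields the force_reraise()/raise self.value frames.
            with excutils.save_and_reraise_exception():
                _roll_back_port_allocation()


    if __name__ == '__main__':
        try:
            update_ports_for_instance()
        except PortBindingFailed as exc:
            print("re-raised:", exc)

The log then continues with the second traceback for the same port-binding failure: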
[ 745.626506] env[62096]: ERROR nova.compute.manager [ 745.626970] env[62096]: Traceback (most recent call last): [ 745.627111] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 745.627111] env[62096]: listener.cb(fileno) [ 745.627214] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 745.627214] env[62096]: result = function(*args, **kwargs) [ 745.627306] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 745.627306] env[62096]: return func(*args, **kwargs) [ 745.627394] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 745.627394] env[62096]: raise e [ 745.627481] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 745.627481] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 745.627579] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 745.627579] env[62096]: created_port_ids = self._update_ports_for_instance( [ 745.627675] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 745.627675] env[62096]: with excutils.save_and_reraise_exception(): [ 745.627762] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 745.627762] env[62096]: self.force_reraise() [ 745.627850] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 745.627850] env[62096]: raise self.value [ 745.627990] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 745.627990] env[62096]: updated_port = self._update_port( [ 745.628113] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 745.628113] env[62096]: _ensure_no_port_binding_failure(port) [ 745.628201] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 745.628201] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 745.628297] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 64fe9acd-2f73-403b-b490-b6600bb258e3, please check neutron logs for more information. [ 745.628576] env[62096]: Removing descriptor: 16 [ 745.629507] env[62096]: ERROR nova.compute.manager [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 64fe9acd-2f73-403b-b490-b6600bb258e3, please check neutron logs for more information. 
[ 745.629507] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] Traceback (most recent call last): [ 745.629507] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 745.629507] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] yield resources [ 745.629507] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 745.629507] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] self.driver.spawn(context, instance, image_meta, [ 745.629507] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 745.629507] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 745.629507] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 745.629507] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] vm_ref = self.build_virtual_machine(instance, [ 745.629507] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 745.629877] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] vif_infos = vmwarevif.get_vif_info(self._session, [ 745.629877] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 745.629877] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] for vif in network_info: [ 745.629877] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 745.629877] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] return self._sync_wrapper(fn, *args, **kwargs) [ 745.629877] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 745.629877] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] self.wait() [ 745.629877] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 745.629877] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] self[:] = self._gt.wait() [ 745.629877] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 745.629877] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] return self._exit_event.wait() [ 745.629877] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 745.629877] env[62096]: ERROR 
nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] result = hub.switch() [ 745.630352] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 745.630352] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] return self.greenlet.switch() [ 745.630352] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 745.630352] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] result = function(*args, **kwargs) [ 745.630352] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 745.630352] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] return func(*args, **kwargs) [ 745.630352] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 745.630352] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] raise e [ 745.630352] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 745.630352] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] nwinfo = self.network_api.allocate_for_instance( [ 745.630352] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 745.630352] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] created_port_ids = self._update_ports_for_instance( [ 745.630352] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 745.630750] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] with excutils.save_and_reraise_exception(): [ 745.630750] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 745.630750] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] self.force_reraise() [ 745.630750] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 745.630750] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] raise self.value [ 745.630750] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 745.630750] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] updated_port = self._update_port( [ 745.630750] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 745.630750] 
env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] _ensure_no_port_binding_failure(port) [ 745.630750] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 745.630750] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] raise exception.PortBindingFailed(port_id=port['id']) [ 745.630750] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] nova.exception.PortBindingFailed: Binding failed for port 64fe9acd-2f73-403b-b490-b6600bb258e3, please check neutron logs for more information. [ 745.630750] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] [ 745.631692] env[62096]: INFO nova.compute.manager [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] Terminating instance [ 745.637550] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Acquiring lock "refresh_cache-0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 745.638588] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Acquired lock "refresh_cache-0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.638916] env[62096]: DEBUG nova.network.neutron [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 745.639551] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Expecting reply to msg 8f97dce573974680889eb38e886b4319 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 745.647179] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8f97dce573974680889eb38e886b4319 [ 745.659121] env[62096]: DEBUG nova.policy [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '09a4673f033c4d139efe4cd9ba4b7560', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd81a5a37184d4b29ad6df7e77dfd3ee4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 745.815077] env[62096]: INFO nova.compute.manager [-] 
[instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] Took 1.05 seconds to deallocate network for instance. [ 745.816541] env[62096]: DEBUG nova.compute.claims [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 745.816715] env[62096]: DEBUG oslo_concurrency.lockutils [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 746.027460] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cf99cf6-7f24-4995-a75a-cbb26198129a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.040034] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89a4c00f-a437-43af-8865-59634695c0c9 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.076877] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07612953-6022-44db-ba56-f630abdb12ed {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.084610] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-416345cb-17a4-46e7-9ed8-57e19f5e8ffd {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.110580] env[62096]: DEBUG nova.compute.manager [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 746.112926] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg aaad09e49d93437cb415dc9180518317 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 746.120172] env[62096]: DEBUG nova.compute.provider_tree [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 746.120172] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Expecting reply to msg db6fe55b7ea9422ab0e1450de308aa8d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 746.127725] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg db6fe55b7ea9422ab0e1450de308aa8d [ 746.153235] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aaad09e49d93437cb415dc9180518317 [ 746.172755] env[62096]: DEBUG nova.network.neutron [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 746.383320] env[62096]: DEBUG nova.network.neutron [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.383837] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Expecting reply to msg 44335623e18b483aaa416d660d47aeb4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 746.394233] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 44335623e18b483aaa416d660d47aeb4 [ 746.403058] env[62096]: DEBUG nova.network.neutron [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] Successfully created port: 4055f37b-0c31-4752-8dba-88f3373f6782 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 746.621211] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg d6ff48f33d1b4532ba845e5c733a85c4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 746.622702] env[62096]: DEBUG nova.scheduler.client.report [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Inventory has not changed for provider 
6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 746.625045] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Expecting reply to msg 483367fa35f848838164491ac157f0fa in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 746.642202] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 483367fa35f848838164491ac157f0fa [ 746.655545] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d6ff48f33d1b4532ba845e5c733a85c4 [ 746.757374] env[62096]: DEBUG nova.compute.manager [req-347f26c0-67d2-4303-9914-9cae86b53849 req-07b7e04a-24c0-4aaf-9624-9fd5dca4a692 service nova] [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] Received event network-changed-64fe9acd-2f73-403b-b490-b6600bb258e3 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 746.757579] env[62096]: DEBUG nova.compute.manager [req-347f26c0-67d2-4303-9914-9cae86b53849 req-07b7e04a-24c0-4aaf-9624-9fd5dca4a692 service nova] [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] Refreshing instance network info cache due to event network-changed-64fe9acd-2f73-403b-b490-b6600bb258e3. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 746.757769] env[62096]: DEBUG oslo_concurrency.lockutils [req-347f26c0-67d2-4303-9914-9cae86b53849 req-07b7e04a-24c0-4aaf-9624-9fd5dca4a692 service nova] Acquiring lock "refresh_cache-0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 746.892185] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Releasing lock "refresh_cache-0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 746.892185] env[62096]: DEBUG nova.compute.manager [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 746.892185] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 746.892185] env[62096]: DEBUG oslo_concurrency.lockutils [req-347f26c0-67d2-4303-9914-9cae86b53849 req-07b7e04a-24c0-4aaf-9624-9fd5dca4a692 service nova] Acquired lock "refresh_cache-0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.892185] env[62096]: DEBUG nova.network.neutron [req-347f26c0-67d2-4303-9914-9cae86b53849 req-07b7e04a-24c0-4aaf-9624-9fd5dca4a692 service nova] [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] Refreshing network info cache for port 64fe9acd-2f73-403b-b490-b6600bb258e3 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 746.892444] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-347f26c0-67d2-4303-9914-9cae86b53849 req-07b7e04a-24c0-4aaf-9624-9fd5dca4a692 service nova] Expecting reply to msg 068cb8288b5a4576bde678c34266a372 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 746.892444] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-14c0b686-8d00-4344-b04c-ce1903d4f00d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.895947] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 068cb8288b5a4576bde678c34266a372 [ 746.898924] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bba84e3a-6547-4e43-853b-cba4da52fa84 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.920548] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c could not be found. [ 746.920548] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 746.920711] env[62096]: INFO nova.compute.manager [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] Took 0.03 seconds to destroy the instance on the hypervisor. [ 746.920845] env[62096]: DEBUG oslo.service.loopingcall [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 746.921058] env[62096]: DEBUG nova.compute.manager [-] [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 746.921145] env[62096]: DEBUG nova.network.neutron [-] [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 746.948639] env[62096]: DEBUG nova.network.neutron [-] [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 746.949281] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg b2cbe56b5e294e9f8b507b96b52efd86 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 746.956976] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b2cbe56b5e294e9f8b507b96b52efd86 [ 747.128163] env[62096]: DEBUG nova.compute.manager [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] Start spawning the instance on the hypervisor. {{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 747.131181] env[62096]: DEBUG oslo_concurrency.lockutils [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.044s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 747.131539] env[62096]: ERROR nova.compute.manager [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c981a4c6-a985-4393-9bca-e1f2dd45c026, please check neutron logs for more information. 
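Annotation: the "Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3" entry logged a little earlier carries per-resource-class inventory (total, reserved, min_unit, max_unit, step_size, allocation_ratio). The sketch below shows how such a payload maps to schedulable capacity, assuming the usual (total - reserved) * allocation_ratio convention; that formula is an assumption about how these fields are consumed, not something this log itself demonstrates.

    # Inventory values copied from the log entry above; the capacity rule
    # below is an assumed convention, not taken from this log.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def usable_capacity(inv):
        """Map resource class -> capacity the scheduler can place against."""
        return {rc: (spec['total'] - spec['reserved']) * spec['allocation_ratio']
                for rc, spec in inv.items()}

    # usable_capacity(inventory)
    # -> {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}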
[ 747.131539] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] Traceback (most recent call last): [ 747.131539] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 747.131539] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] self.driver.spawn(context, instance, image_meta, [ 747.131539] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 747.131539] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] self._vmops.spawn(context, instance, image_meta, injected_files, [ 747.131539] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 747.131539] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] vm_ref = self.build_virtual_machine(instance, [ 747.131539] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 747.131539] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] vif_infos = vmwarevif.get_vif_info(self._session, [ 747.131539] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 747.131913] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] for vif in network_info: [ 747.131913] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 747.131913] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] return self._sync_wrapper(fn, *args, **kwargs) [ 747.131913] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 747.131913] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] self.wait() [ 747.131913] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 747.131913] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] self[:] = self._gt.wait() [ 747.131913] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 747.131913] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] return self._exit_event.wait() [ 747.131913] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 747.131913] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] current.throw(*self._exc) [ 747.131913] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
747.131913] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] result = function(*args, **kwargs) [ 747.132334] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 747.132334] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] return func(*args, **kwargs) [ 747.132334] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 747.132334] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] raise e [ 747.132334] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 747.132334] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] nwinfo = self.network_api.allocate_for_instance( [ 747.132334] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 747.132334] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] created_port_ids = self._update_ports_for_instance( [ 747.132334] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 747.132334] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] with excutils.save_and_reraise_exception(): [ 747.132334] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 747.132334] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] self.force_reraise() [ 747.132334] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 747.132739] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] raise self.value [ 747.132739] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 747.132739] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] updated_port = self._update_port( [ 747.132739] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 747.132739] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] _ensure_no_port_binding_failure(port) [ 747.132739] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 747.132739] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] raise exception.PortBindingFailed(port_id=port['id']) [ 747.132739] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] nova.exception.PortBindingFailed: Binding failed for 
port c981a4c6-a985-4393-9bca-e1f2dd45c026, please check neutron logs for more information. [ 747.132739] env[62096]: ERROR nova.compute.manager [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] [ 747.132739] env[62096]: DEBUG nova.compute.utils [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] Binding failed for port c981a4c6-a985-4393-9bca-e1f2dd45c026, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 747.133402] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.682s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 747.135096] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Expecting reply to msg 35420154f6034b86bb2829feda2af8c7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 747.138492] env[62096]: DEBUG nova.compute.manager [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] Build of instance 03570c3d-3ca5-495d-8a52-2f86b280f667 was re-scheduled: Binding failed for port c981a4c6-a985-4393-9bca-e1f2dd45c026, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 747.138492] env[62096]: DEBUG nova.compute.manager [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 747.138492] env[62096]: DEBUG oslo_concurrency.lockutils [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Acquiring lock "refresh_cache-03570c3d-3ca5-495d-8a52-2f86b280f667" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 747.138492] env[62096]: DEBUG oslo_concurrency.lockutils [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Acquired lock "refresh_cache-03570c3d-3ca5-495d-8a52-2f86b280f667" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.138770] env[62096]: DEBUG nova.network.neutron [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 747.138770] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Expecting reply to msg f4888b78b9694abda467b6b2660bde33 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 747.153103] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f4888b78b9694abda467b6b2660bde33 [ 747.157435] env[62096]: DEBUG nova.virt.hardware [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 747.157953] env[62096]: DEBUG nova.virt.hardware [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 747.158066] env[62096]: DEBUG nova.virt.hardware [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 
tempest-ServersTestJSON-796182065-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 747.158261] env[62096]: DEBUG nova.virt.hardware [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 747.158406] env[62096]: DEBUG nova.virt.hardware [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 747.158551] env[62096]: DEBUG nova.virt.hardware [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 747.158754] env[62096]: DEBUG nova.virt.hardware [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 747.158909] env[62096]: DEBUG nova.virt.hardware [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 747.159084] env[62096]: DEBUG nova.virt.hardware [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 747.159225] env[62096]: DEBUG nova.virt.hardware [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 747.159407] env[62096]: DEBUG nova.virt.hardware [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 747.160539] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be55d92c-07ae-4fd5-8416-bd65008aa576 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.169567] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd1bfb85-99b0-4e92-975d-999dc3fee78e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.183809] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 35420154f6034b86bb2829feda2af8c7 [ 747.423060] env[62096]: DEBUG nova.network.neutron [req-347f26c0-67d2-4303-9914-9cae86b53849 
req-07b7e04a-24c0-4aaf-9624-9fd5dca4a692 service nova] [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 747.451939] env[62096]: DEBUG nova.network.neutron [-] [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.452434] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg eef4c2e438574ff7a10906b8acd1c7ba in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 747.462400] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eef4c2e438574ff7a10906b8acd1c7ba [ 747.642394] env[62096]: DEBUG nova.network.neutron [req-347f26c0-67d2-4303-9914-9cae86b53849 req-07b7e04a-24c0-4aaf-9624-9fd5dca4a692 service nova] [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.642869] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-347f26c0-67d2-4303-9914-9cae86b53849 req-07b7e04a-24c0-4aaf-9624-9fd5dca4a692 service nova] Expecting reply to msg 8f532fd1868c49eb8035136c29f2d59f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 747.650383] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8f532fd1868c49eb8035136c29f2d59f [ 747.676534] env[62096]: DEBUG nova.network.neutron [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 747.954335] env[62096]: INFO nova.compute.manager [-] [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] Took 1.03 seconds to deallocate network for instance. 
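Annotation: the Acquiring / Acquired / Releasing lock "refresh_cache-<uuid>" entries in this section are produced by oslo.concurrency's lock() context manager (the log cites lockutils.py:310/313/331). A hedged sketch of that pattern follows: the lock name mirrors the log, while the wrapped function body is a stand-in for nova's cache refresh, not its actual implementation.

    # Illustrative only: serialize network-info cache refreshes per instance
    # with an oslo.concurrency lock named exactly as in the log.
    from oslo_concurrency import lockutils

    def refresh_network_info_cache(instance_uuid, fetch_nw_info):
        # lockutils.lock() is a context manager; entering and leaving it
        # emits the "Acquiring lock" / "Acquired lock" / "Releasing lock"
        # DEBUG lines seen above.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            return fetch_nw_info(instance_uuid)

    # e.g. refresh_network_info_cache('0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c',
    #                                 lambda uuid: [])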
[ 747.956943] env[62096]: DEBUG nova.compute.claims [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 747.957159] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 748.000345] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdd0508f-ce56-41f3-ab12-ade4f957c4c6 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.008606] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd7d8dd3-fd8f-4dc0-9999-20f4cfacbb8d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.038415] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f5cdbcf-d665-4a83-ac33-00df6f40023e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.041764] env[62096]: DEBUG nova.network.neutron [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.042267] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Expecting reply to msg 65825d038fe4404181555e59cf39ea0b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 748.047877] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe01c0de-d966-48ec-8e53-82e8e563c55a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.061258] env[62096]: DEBUG nova.compute.provider_tree [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 748.061753] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Expecting reply to msg 7c7803a557d84b02aefb45a8d0c2cdb0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 748.064711] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 65825d038fe4404181555e59cf39ea0b [ 748.071849] env[62096]: ERROR nova.compute.manager [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 
tempest-ServersTestJSON-796182065-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 4055f37b-0c31-4752-8dba-88f3373f6782, please check neutron logs for more information. [ 748.071849] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 748.071849] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 748.071849] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 748.071849] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 748.071849] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 748.071849] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 748.071849] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 748.071849] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 748.071849] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 748.071849] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 748.071849] env[62096]: ERROR nova.compute.manager raise self.value [ 748.071849] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 748.071849] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 748.071849] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 748.071849] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 748.072438] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 748.072438] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 748.072438] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 4055f37b-0c31-4752-8dba-88f3373f6782, please check neutron logs for more information. 
[ 748.072438] env[62096]: ERROR nova.compute.manager [ 748.072438] env[62096]: Traceback (most recent call last): [ 748.072438] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 748.072438] env[62096]: listener.cb(fileno) [ 748.072438] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 748.072438] env[62096]: result = function(*args, **kwargs) [ 748.072438] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 748.072438] env[62096]: return func(*args, **kwargs) [ 748.072438] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 748.072438] env[62096]: raise e [ 748.072438] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 748.072438] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 748.072438] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 748.072438] env[62096]: created_port_ids = self._update_ports_for_instance( [ 748.072438] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 748.072438] env[62096]: with excutils.save_and_reraise_exception(): [ 748.072438] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 748.072438] env[62096]: self.force_reraise() [ 748.072438] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 748.072438] env[62096]: raise self.value [ 748.072438] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 748.072438] env[62096]: updated_port = self._update_port( [ 748.072438] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 748.072438] env[62096]: _ensure_no_port_binding_failure(port) [ 748.072438] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 748.072438] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 748.073632] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 4055f37b-0c31-4752-8dba-88f3373f6782, please check neutron logs for more information. [ 748.073632] env[62096]: Removing descriptor: 19 [ 748.073632] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c7803a557d84b02aefb45a8d0c2cdb0 [ 748.073632] env[62096]: ERROR nova.compute.manager [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 4055f37b-0c31-4752-8dba-88f3373f6782, please check neutron logs for more information. 
[ 748.073632] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] Traceback (most recent call last): [ 748.073632] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 748.073632] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] yield resources [ 748.073632] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 748.073632] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] self.driver.spawn(context, instance, image_meta, [ 748.073632] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 748.073632] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] self._vmops.spawn(context, instance, image_meta, injected_files, [ 748.073632] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 748.073632] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] vm_ref = self.build_virtual_machine(instance, [ 748.074131] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 748.074131] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] vif_infos = vmwarevif.get_vif_info(self._session, [ 748.074131] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 748.074131] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] for vif in network_info: [ 748.074131] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 748.074131] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] return self._sync_wrapper(fn, *args, **kwargs) [ 748.074131] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 748.074131] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] self.wait() [ 748.074131] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 748.074131] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] self[:] = self._gt.wait() [ 748.074131] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 748.074131] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] return self._exit_event.wait() [ 748.074131] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 748.074652] env[62096]: ERROR 
nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] result = hub.switch() [ 748.074652] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 748.074652] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] return self.greenlet.switch() [ 748.074652] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 748.074652] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] result = function(*args, **kwargs) [ 748.074652] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 748.074652] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] return func(*args, **kwargs) [ 748.074652] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 748.074652] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] raise e [ 748.074652] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 748.074652] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] nwinfo = self.network_api.allocate_for_instance( [ 748.074652] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 748.074652] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] created_port_ids = self._update_ports_for_instance( [ 748.075100] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 748.075100] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] with excutils.save_and_reraise_exception(): [ 748.075100] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 748.075100] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] self.force_reraise() [ 748.075100] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 748.075100] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] raise self.value [ 748.075100] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 748.075100] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] updated_port = self._update_port( [ 748.075100] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 748.075100] 
env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] _ensure_no_port_binding_failure(port) [ 748.075100] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 748.075100] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] raise exception.PortBindingFailed(port_id=port['id']) [ 748.075530] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] nova.exception.PortBindingFailed: Binding failed for port 4055f37b-0c31-4752-8dba-88f3373f6782, please check neutron logs for more information. [ 748.075530] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] [ 748.075530] env[62096]: INFO nova.compute.manager [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] Terminating instance [ 748.076347] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Acquiring lock "refresh_cache-dafff089-f6e4-4269-bf0e-ea305c11ff36" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 748.076496] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Acquired lock "refresh_cache-dafff089-f6e4-4269-bf0e-ea305c11ff36" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.076652] env[62096]: DEBUG nova.network.neutron [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 748.077055] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 59f85b5386014d80bbf0ff67a36aec17 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 748.085432] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 59f85b5386014d80bbf0ff67a36aec17 [ 748.145569] env[62096]: DEBUG oslo_concurrency.lockutils [req-347f26c0-67d2-4303-9914-9cae86b53849 req-07b7e04a-24c0-4aaf-9624-9fd5dca4a692 service nova] Releasing lock "refresh_cache-0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 748.145817] env[62096]: DEBUG nova.compute.manager [req-347f26c0-67d2-4303-9914-9cae86b53849 req-07b7e04a-24c0-4aaf-9624-9fd5dca4a692 service nova] [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] Received event network-vif-deleted-64fe9acd-2f73-403b-b490-b6600bb258e3 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 748.405403] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62096) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 748.405651] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 748.406270] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 56353f9af7f14ded92faa2200f1ae86b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 748.419758] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 56353f9af7f14ded92faa2200f1ae86b [ 748.544996] env[62096]: DEBUG oslo_concurrency.lockutils [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Releasing lock "refresh_cache-03570c3d-3ca5-495d-8a52-2f86b280f667" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 748.545371] env[62096]: DEBUG nova.compute.manager [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 748.545685] env[62096]: DEBUG nova.compute.manager [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 748.545920] env[62096]: DEBUG nova.network.neutron [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 748.565253] env[62096]: DEBUG nova.scheduler.client.report [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 748.568686] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Expecting reply to msg daa13709637c43919d1f9a4b8239c4a9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 748.570302] env[62096]: DEBUG nova.network.neutron [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] [instance: 
03570c3d-3ca5-495d-8a52-2f86b280f667] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 748.570761] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Expecting reply to msg 3a2e99a9e33343d68631821c0de21426 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 748.577299] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3a2e99a9e33343d68631821c0de21426 [ 748.581346] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg daa13709637c43919d1f9a4b8239c4a9 [ 748.592306] env[62096]: DEBUG nova.network.neutron [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 748.681733] env[62096]: DEBUG nova.network.neutron [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.682355] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 4545acd67184433eb56e9904c51359ed in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 748.690158] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4545acd67184433eb56e9904c51359ed [ 748.787078] env[62096]: DEBUG nova.compute.manager [req-b916c42f-78fa-4a23-8433-a0005268103b req-7bb97821-c95b-406b-84b0-2c9bd3aabd51 service nova] [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] Received event network-changed-4055f37b-0c31-4752-8dba-88f3373f6782 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 748.787272] env[62096]: DEBUG nova.compute.manager [req-b916c42f-78fa-4a23-8433-a0005268103b req-7bb97821-c95b-406b-84b0-2c9bd3aabd51 service nova] [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] Refreshing instance network info cache due to event network-changed-4055f37b-0c31-4752-8dba-88f3373f6782. 
{{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 748.787516] env[62096]: DEBUG oslo_concurrency.lockutils [req-b916c42f-78fa-4a23-8433-a0005268103b req-7bb97821-c95b-406b-84b0-2c9bd3aabd51 service nova] Acquiring lock "refresh_cache-dafff089-f6e4-4269-bf0e-ea305c11ff36" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 748.911190] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 748.911390] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Starting heal instance info cache {{(pid=62096) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9926}} [ 748.911518] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Rebuilding the list of instances to heal {{(pid=62096) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 748.912056] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 39ca8312227546cfa4343a2a486b70e2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 748.924925] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 39ca8312227546cfa4343a2a486b70e2 [ 749.073185] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.940s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 749.073825] env[62096]: ERROR nova.compute.manager [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] [instance: d38be540-9cd0-428a-b10d-313d2d464b25] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 1c25f8be-60a8-4ef8-b3db-1095b6a81385, please check neutron logs for more information. 
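Both port-binding tracebacks in this stretch (port 4055f37b-0c31-4752-8dba-88f3373f6782 above and port 1c25f8be-60a8-4ef8-b3db-1095b6a81385 below) bottom out in _ensure_no_port_binding_failure at nova/network/neutron.py:294, which raises nova.exception.PortBindingFailed once Neutron reports the binding as failed. A minimal sketch of that kind of guard, assuming Neutron marks a failed binding via the port's binding:vif_type field; the field value and the exception class below are reconstructed for illustration, not taken from the Nova source.

# Illustrative sketch only -- reconstructed from the traceback, not the actual Nova code.
class PortBindingFailed(Exception):
    """Mirrors the nova.exception.PortBindingFailed message seen in the log."""
    def __init__(self, port_id):
        super().__init__("Binding failed for port %s, please check neutron "
                         "logs for more information." % port_id)

def ensure_no_port_binding_failure(port):
    # Assumption: a failed binding shows up as binding:vif_type == 'binding_failed'.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

try:
    ensure_no_port_binding_failure({
        'id': '4055f37b-0c31-4752-8dba-88f3373f6782',
        'binding:vif_type': 'binding_failed',
    })
except PortBindingFailed as exc:
    print(exc)

In both failures here the check fires while the network is being allocated asynchronously for the instance, which is why the build aborts and the instance is terminated (dafff089-...) or re-scheduled (d38be540-...) immediately afterwards.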
[ 749.073825] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] Traceback (most recent call last): [ 749.073825] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 749.073825] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] self.driver.spawn(context, instance, image_meta, [ 749.073825] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 749.073825] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] self._vmops.spawn(context, instance, image_meta, injected_files, [ 749.073825] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 749.073825] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] vm_ref = self.build_virtual_machine(instance, [ 749.073825] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 749.073825] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] vif_infos = vmwarevif.get_vif_info(self._session, [ 749.073825] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 749.075237] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] for vif in network_info: [ 749.075237] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 749.075237] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] return self._sync_wrapper(fn, *args, **kwargs) [ 749.075237] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 749.075237] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] self.wait() [ 749.075237] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 749.075237] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] self[:] = self._gt.wait() [ 749.075237] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 749.075237] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] return self._exit_event.wait() [ 749.075237] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 749.075237] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] result = hub.switch() [ 749.075237] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
749.075237] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] return self.greenlet.switch() [ 749.075664] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 749.075664] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] result = function(*args, **kwargs) [ 749.075664] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 749.075664] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] return func(*args, **kwargs) [ 749.075664] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 749.075664] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] raise e [ 749.075664] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 749.075664] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] nwinfo = self.network_api.allocate_for_instance( [ 749.075664] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 749.075664] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] created_port_ids = self._update_ports_for_instance( [ 749.075664] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 749.075664] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] with excutils.save_and_reraise_exception(): [ 749.075664] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 749.076129] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] self.force_reraise() [ 749.076129] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 749.076129] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] raise self.value [ 749.076129] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 749.076129] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] updated_port = self._update_port( [ 749.076129] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 749.076129] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] _ensure_no_port_binding_failure(port) [ 749.076129] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 749.076129] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] raise exception.PortBindingFailed(port_id=port['id']) [ 749.076129] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] nova.exception.PortBindingFailed: Binding failed for port 1c25f8be-60a8-4ef8-b3db-1095b6a81385, please check neutron logs for more information. [ 749.076129] env[62096]: ERROR nova.compute.manager [instance: d38be540-9cd0-428a-b10d-313d2d464b25] [ 749.076506] env[62096]: DEBUG nova.compute.utils [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] [instance: d38be540-9cd0-428a-b10d-313d2d464b25] Binding failed for port 1c25f8be-60a8-4ef8-b3db-1095b6a81385, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 749.076506] env[62096]: DEBUG nova.network.neutron [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.076506] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Expecting reply to msg 159929f1d57b414f9dafb512bf1a79b0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 749.081550] env[62096]: DEBUG nova.compute.manager [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] [instance: d38be540-9cd0-428a-b10d-313d2d464b25] Build of instance d38be540-9cd0-428a-b10d-313d2d464b25 was re-scheduled: Binding failed for port 1c25f8be-60a8-4ef8-b3db-1095b6a81385, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 749.081550] env[62096]: DEBUG nova.compute.manager [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] [instance: d38be540-9cd0-428a-b10d-313d2d464b25] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 749.081550] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Acquiring lock "refresh_cache-d38be540-9cd0-428a-b10d-313d2d464b25" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 749.081550] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Acquired lock "refresh_cache-d38be540-9cd0-428a-b10d-313d2d464b25" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.082179] env[62096]: DEBUG nova.network.neutron [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] [instance: d38be540-9cd0-428a-b10d-313d2d464b25] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 749.082179] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Expecting reply to msg dbec7dbb4c64490e8f02fa6898f53ae3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 749.082179] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.672s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 749.082179] env[62096]: INFO nova.compute.claims [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 749.082179] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Expecting reply to msg 3ef99808d92044e18c444a94feb5b9bc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 749.086335] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dbec7dbb4c64490e8f02fa6898f53ae3 [ 749.094573] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 159929f1d57b414f9dafb512bf1a79b0 [ 749.117291] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ef99808d92044e18c444a94feb5b9bc [ 749.184528] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Releasing lock 
"refresh_cache-dafff089-f6e4-4269-bf0e-ea305c11ff36" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 749.184951] env[62096]: DEBUG nova.compute.manager [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 749.185297] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 749.185621] env[62096]: DEBUG oslo_concurrency.lockutils [req-b916c42f-78fa-4a23-8433-a0005268103b req-7bb97821-c95b-406b-84b0-2c9bd3aabd51 service nova] Acquired lock "refresh_cache-dafff089-f6e4-4269-bf0e-ea305c11ff36" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.185796] env[62096]: DEBUG nova.network.neutron [req-b916c42f-78fa-4a23-8433-a0005268103b req-7bb97821-c95b-406b-84b0-2c9bd3aabd51 service nova] [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] Refreshing network info cache for port 4055f37b-0c31-4752-8dba-88f3373f6782 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 749.186226] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-b916c42f-78fa-4a23-8433-a0005268103b req-7bb97821-c95b-406b-84b0-2c9bd3aabd51 service nova] Expecting reply to msg e1d9540a4b90431593bcdc72f0b7b5f4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 749.187012] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-69adadae-e3cc-4c34-9b31-61a19cc888f9 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.194099] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e1d9540a4b90431593bcdc72f0b7b5f4 [ 749.197523] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-971be504-b81a-41d5-a58f-f72fa7ca1888 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.218863] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance dafff089-f6e4-4269-bf0e-ea305c11ff36 could not be found. [ 749.219087] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 749.219258] env[62096]: INFO nova.compute.manager [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] Took 0.03 seconds to destroy the instance on the hypervisor. 
[ 749.219489] env[62096]: DEBUG oslo.service.loopingcall [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 749.219689] env[62096]: DEBUG nova.compute.manager [-] [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 749.219776] env[62096]: DEBUG nova.network.neutron [-] [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 749.237837] env[62096]: DEBUG nova.network.neutron [-] [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 749.238320] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 53e10d7abf4c4d27879a75b69c180d03 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 749.245981] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 53e10d7abf4c4d27879a75b69c180d03 [ 749.415224] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] Skipping network cache update for instance because it is Building. {{(pid=62096) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 749.415377] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] [instance: 321ab95b-6221-4bab-b442-a90926098dae] Skipping network cache update for instance because it is Building. {{(pid=62096) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 749.415504] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] Skipping network cache update for instance because it is Building. {{(pid=62096) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 749.415623] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] Skipping network cache update for instance because it is Building. {{(pid=62096) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 749.415740] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] Skipping network cache update for instance because it is Building. {{(pid=62096) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 749.415887] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Didn't find any instances for network info cache update. 
{{(pid=62096) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10012}} [ 749.416075] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 749.416277] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 749.416434] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 749.416572] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 749.416702] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 749.416833] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 749.416959] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62096) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10545}} [ 749.417105] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 749.417515] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 490033b25dcc44e582442587949511c7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 749.427361] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 490033b25dcc44e582442587949511c7 [ 749.584570] env[62096]: INFO nova.compute.manager [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] [instance: 03570c3d-3ca5-495d-8a52-2f86b280f667] Took 1.04 seconds to deallocate network for instance. 
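The run of "Running periodic task ComputeManager._poll_*" entries above comes from oslo.service's periodic-task machinery: decorated manager methods are collected and invoked by run_periodic_tasks(), and individual tasks may no-op on configuration, as with "CONF.reclaim_instance_interval <= 0, skipping". A minimal sketch of that pattern, assuming the public oslo.service API; the manager and task names here are invented for illustration.

from oslo_config import cfg
from oslo_service import periodic_task

CONF = cfg.CONF

class ExampleManager(periodic_task.PeriodicTasks):
    """Toy manager: decorated methods are gathered and run by run_periodic_tasks()."""

    def __init__(self):
        super().__init__(CONF)

    @periodic_task.periodic_task(spacing=60, run_immediately=True)
    def _poll_something(self, context):
        # Real tasks often bail out based on config, like the
        # reclaim_instance_interval check seen in the log.
        print("Running periodic task ExampleManager._poll_something")

manager = ExampleManager()
manager.run_periodic_tasks(context=None, raise_on_error=False)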
[ 749.586433] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Expecting reply to msg 0dc5c8563181468db7ebe78c88a965c0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 749.590550] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Expecting reply to msg b8c64be190004b0aa5e80c4f021a6bdf in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 749.597655] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b8c64be190004b0aa5e80c4f021a6bdf [ 749.603615] env[62096]: DEBUG nova.network.neutron [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] [instance: d38be540-9cd0-428a-b10d-313d2d464b25] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 749.621840] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0dc5c8563181468db7ebe78c88a965c0 [ 749.693189] env[62096]: DEBUG nova.network.neutron [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] [instance: d38be540-9cd0-428a-b10d-313d2d464b25] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.693789] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Expecting reply to msg 7f01c50aac2e49a794ee90a2c64cc0fd in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 749.701531] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7f01c50aac2e49a794ee90a2c64cc0fd [ 749.712188] env[62096]: DEBUG nova.network.neutron [req-b916c42f-78fa-4a23-8433-a0005268103b req-7bb97821-c95b-406b-84b0-2c9bd3aabd51 service nova] [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 749.741956] env[62096]: DEBUG nova.network.neutron [-] [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.741956] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 586e1a02500045b8916b63b3e4467ef2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 749.751280] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 586e1a02500045b8916b63b3e4467ef2 [ 749.800786] env[62096]: DEBUG nova.network.neutron [req-b916c42f-78fa-4a23-8433-a0005268103b req-7bb97821-c95b-406b-84b0-2c9bd3aabd51 service nova] [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.801318] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-b916c42f-78fa-4a23-8433-a0005268103b req-7bb97821-c95b-406b-84b0-2c9bd3aabd51 service nova] Expecting reply to msg 6337fecad3ae420ba7068325caa9c84b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 749.809023] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6337fecad3ae420ba7068325caa9c84b [ 749.921700] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 750.092243] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Expecting reply to msg 418c6f07dfc14343acc2c780fff71f5a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 750.126053] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 418c6f07dfc14343acc2c780fff71f5a [ 750.196132] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Releasing lock "refresh_cache-d38be540-9cd0-428a-b10d-313d2d464b25" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 750.196392] env[62096]: DEBUG nova.compute.manager [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 750.196562] env[62096]: DEBUG nova.compute.manager [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] [instance: d38be540-9cd0-428a-b10d-313d2d464b25] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 750.196727] env[62096]: DEBUG nova.network.neutron [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] [instance: d38be540-9cd0-428a-b10d-313d2d464b25] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 750.213354] env[62096]: DEBUG nova.network.neutron [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] [instance: d38be540-9cd0-428a-b10d-313d2d464b25] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 750.213878] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Expecting reply to msg 392bab76851e4e1c82a439fd27927665 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 750.221956] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 392bab76851e4e1c82a439fd27927665 [ 750.243518] env[62096]: INFO nova.compute.manager [-] [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] Took 1.02 seconds to deallocate network for instance. [ 750.245872] env[62096]: DEBUG nova.compute.claims [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 750.245979] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 750.303543] env[62096]: DEBUG oslo_concurrency.lockutils [req-b916c42f-78fa-4a23-8433-a0005268103b req-7bb97821-c95b-406b-84b0-2c9bd3aabd51 service nova] Releasing lock "refresh_cache-dafff089-f6e4-4269-bf0e-ea305c11ff36" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 750.303788] env[62096]: DEBUG nova.compute.manager [req-b916c42f-78fa-4a23-8433-a0005268103b req-7bb97821-c95b-406b-84b0-2c9bd3aabd51 service nova] [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] Received event network-vif-deleted-4055f37b-0c31-4752-8dba-88f3373f6782 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 750.410457] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1288d7a-28bc-46a9-9464-8f0a6a5d7d17 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.418054] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-236fca70-a05c-48b0-8328-7c0f8338badc {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.447394] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3935e19-c440-4feb-ac8c-082c95b2ab3f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.454709] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6357a52d-c90a-451a-b28c-a61f3d5fdd34 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.467601] env[62096]: DEBUG nova.compute.provider_tree [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 750.468147] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Expecting reply to msg b414ec72a9b04f759f473028239a1e6f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 750.475317] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b414ec72a9b04f759f473028239a1e6f [ 750.617461] env[62096]: INFO nova.scheduler.client.report [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Deleted allocations for instance 03570c3d-3ca5-495d-8a52-2f86b280f667 [ 750.623568] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Expecting reply to msg 5fddac86a3e74992bcc6903d96c1201c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 750.640433] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5fddac86a3e74992bcc6903d96c1201c [ 750.716238] env[62096]: DEBUG nova.network.neutron [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] [instance: d38be540-9cd0-428a-b10d-313d2d464b25] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.716843] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Expecting reply to msg b6c3ab848fbe49b399531d57eff8aa5f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 750.724965] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b6c3ab848fbe49b399531d57eff8aa5f [ 750.970755] env[62096]: DEBUG nova.scheduler.client.report [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 
512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 750.973212] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Expecting reply to msg 1b5bcfec8a8f482f8506665eda7dddd1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 750.983842] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1b5bcfec8a8f482f8506665eda7dddd1 [ 751.131364] env[62096]: DEBUG oslo_concurrency.lockutils [None req-31582930-cf6e-43cb-a675-23470985f47b tempest-ServerActionsV293TestJSON-192366267 tempest-ServerActionsV293TestJSON-192366267-project-member] Lock "03570c3d-3ca5-495d-8a52-2f86b280f667" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 151.824s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 751.132110] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg ca935fc34a9e4e8baf9a1115a2091d58 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 751.141110] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ca935fc34a9e4e8baf9a1115a2091d58 [ 751.236608] env[62096]: INFO nova.compute.manager [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] [instance: d38be540-9cd0-428a-b10d-313d2d464b25] Took 1.04 seconds to deallocate network for instance. [ 751.238430] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Expecting reply to msg 8b3c852112b2406d81562e889ed4ae7f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 751.288160] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8b3c852112b2406d81562e889ed4ae7f [ 751.476610] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.397s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 751.477172] env[62096]: DEBUG nova.compute.manager [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 751.478923] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Expecting reply to msg f3819942f37d46e09451bca6ef7ef4fa in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 751.479915] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.062s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 751.481340] env[62096]: INFO nova.compute.claims [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 751.482876] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Expecting reply to msg fe08f7bd541244749a8c78bea45a4d0a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 751.517636] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f3819942f37d46e09451bca6ef7ef4fa [ 751.528648] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fe08f7bd541244749a8c78bea45a4d0a [ 751.634304] env[62096]: DEBUG nova.compute.manager [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] Starting instance... 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 751.636204] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg f5c259849a2d4159b8e213124fb1a8a6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 751.669821] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f5c259849a2d4159b8e213124fb1a8a6 [ 751.743666] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Expecting reply to msg 339e254f5f3946b99bf643efd1614319 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 751.776330] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 339e254f5f3946b99bf643efd1614319 [ 751.994551] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Expecting reply to msg 48136b43dfdc44c48f6fe5730ae0b54f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 751.996269] env[62096]: DEBUG nova.compute.utils [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 751.996839] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Expecting reply to msg 0edc57bd0ea54e9b92486ccb4d3a5029 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 751.997715] env[62096]: DEBUG nova.compute.manager [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 751.997885] env[62096]: DEBUG nova.network.neutron [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 752.004242] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 48136b43dfdc44c48f6fe5730ae0b54f [ 752.013815] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0edc57bd0ea54e9b92486ccb4d3a5029 [ 752.075486] env[62096]: DEBUG nova.policy [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '220981d20a1a4f76a40389f3c9a3d761', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7b65a4cddd9a4c728d0ccbd6ce3e59b7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 752.163899] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 752.263734] env[62096]: INFO nova.scheduler.client.report [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Deleted allocations for instance d38be540-9cd0-428a-b10d-313d2d464b25 [ 752.269809] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Expecting reply to msg f0b6db4f6f144c0892b7b9812dc16619 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 752.287331] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f0b6db4f6f144c0892b7b9812dc16619 [ 752.419069] env[62096]: DEBUG nova.network.neutron [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] Successfully created port: f277d93e-a15e-4bae-a5d0-cba98224d99e {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 752.501732] env[62096]: DEBUG nova.compute.manager [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 752.503319] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Expecting reply to msg 47d1561c8951477995db17f083327722 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 752.539612] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 47d1561c8951477995db17f083327722 [ 752.776394] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5b2f59bb-27e5-498c-aa13-1fed23947587 tempest-ServerRescueTestJSON-190058497 tempest-ServerRescueTestJSON-190058497-project-member] Lock "d38be540-9cd0-428a-b10d-313d2d464b25" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 152.860s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 752.777033] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Expecting reply to msg 6dd6e27e026240d6a853c5ca37584b8e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 752.788970] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6dd6e27e026240d6a853c5ca37584b8e [ 752.849788] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac7c1694-578b-41f4-ac87-9d9049adc46b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.859647] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48ab3785-bdcc-44c3-9fc7-86e62587aaad {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.892655] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96c8bd11-f36d-4484-b43e-52914f2131e3 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.901022] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0794e71d-e79e-42c7-9a32-d18b2447af73 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.915086] env[62096]: DEBUG nova.compute.provider_tree [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 752.916192] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Expecting reply to msg 09481430e1c84b38bea928deea0446f9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 752.923608] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 09481430e1c84b38bea928deea0446f9 [ 753.012126] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 
tempest-MigrationsAdminTest-2139255800-project-member] Expecting reply to msg ecd8cd7ea8f64e7396022bbf5a9d3850 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 753.049876] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ecd8cd7ea8f64e7396022bbf5a9d3850 [ 753.280152] env[62096]: DEBUG nova.compute.manager [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 753.281882] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Expecting reply to msg f8d9312804fa4ab6af5c87b3e7a9487d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 753.321458] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f8d9312804fa4ab6af5c87b3e7a9487d [ 753.418626] env[62096]: DEBUG nova.scheduler.client.report [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 753.422148] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Expecting reply to msg 3df04b1292ef4aeda61c0a27963fd3f5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 753.433741] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3df04b1292ef4aeda61c0a27963fd3f5 [ 753.515600] env[62096]: DEBUG nova.compute.manager [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 753.545392] env[62096]: DEBUG nova.virt.hardware [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:55:29Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='5438b40b-f3ab-4fc4-8ded-8b8464e4dc8a',id=37,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1092343107',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 753.545637] env[62096]: DEBUG nova.virt.hardware [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 753.546628] env[62096]: DEBUG nova.virt.hardware [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 753.546628] env[62096]: DEBUG nova.virt.hardware [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 753.546628] env[62096]: DEBUG nova.virt.hardware [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 753.546628] env[62096]: DEBUG nova.virt.hardware [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 753.546628] env[62096]: DEBUG nova.virt.hardware [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 753.546895] env[62096]: DEBUG nova.virt.hardware [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 753.546895] env[62096]: DEBUG 
nova.virt.hardware [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 753.546895] env[62096]: DEBUG nova.virt.hardware [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 753.547054] env[62096]: DEBUG nova.virt.hardware [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 753.547912] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ec41871-5a09-47d0-add2-e6839f3d0baa {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.556230] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b22efc4-fc25-4a65-bb1b-4bcb76ead5fc {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.801793] env[62096]: DEBUG oslo_concurrency.lockutils [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 753.866729] env[62096]: DEBUG nova.compute.manager [req-b99e59f2-e032-4c6d-a15e-51d0981b8c83 req-a9d6a17a-f9dd-4049-b097-23a24e425b83 service nova] [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] Received event network-changed-f277d93e-a15e-4bae-a5d0-cba98224d99e {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 753.866941] env[62096]: DEBUG nova.compute.manager [req-b99e59f2-e032-4c6d-a15e-51d0981b8c83 req-a9d6a17a-f9dd-4049-b097-23a24e425b83 service nova] [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] Refreshing instance network info cache due to event network-changed-f277d93e-a15e-4bae-a5d0-cba98224d99e. 
{{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 753.867142] env[62096]: DEBUG oslo_concurrency.lockutils [req-b99e59f2-e032-4c6d-a15e-51d0981b8c83 req-a9d6a17a-f9dd-4049-b097-23a24e425b83 service nova] Acquiring lock "refresh_cache-7bfac3e2-f06c-4690-9215-a5f67a67c5bd" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 753.867294] env[62096]: DEBUG oslo_concurrency.lockutils [req-b99e59f2-e032-4c6d-a15e-51d0981b8c83 req-a9d6a17a-f9dd-4049-b097-23a24e425b83 service nova] Acquired lock "refresh_cache-7bfac3e2-f06c-4690-9215-a5f67a67c5bd" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 753.867429] env[62096]: DEBUG nova.network.neutron [req-b99e59f2-e032-4c6d-a15e-51d0981b8c83 req-a9d6a17a-f9dd-4049-b097-23a24e425b83 service nova] [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] Refreshing network info cache for port f277d93e-a15e-4bae-a5d0-cba98224d99e {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 753.867859] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-b99e59f2-e032-4c6d-a15e-51d0981b8c83 req-a9d6a17a-f9dd-4049-b097-23a24e425b83 service nova] Expecting reply to msg e10cf5f1bd7a46dfaae4edf742d250d3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 753.876215] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e10cf5f1bd7a46dfaae4edf742d250d3 [ 753.923608] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.444s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 753.924232] env[62096]: DEBUG nova.compute.manager [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 753.925918] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Expecting reply to msg fdfc00cd491e4a199dcfc3e2d4ee0c09 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 753.926903] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.957s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 753.929018] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Expecting reply to msg 7e7d9687bdee4acb97a3534de34bdf58 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 753.978967] env[62096]: ERROR nova.compute.manager [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f277d93e-a15e-4bae-a5d0-cba98224d99e, please check neutron logs for more information. [ 753.978967] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 753.978967] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 753.978967] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 753.978967] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 753.978967] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 753.978967] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 753.978967] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 753.978967] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 753.978967] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 753.978967] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 753.978967] env[62096]: ERROR nova.compute.manager raise self.value [ 753.978967] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 753.978967] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 753.978967] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 753.978967] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 753.979524] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 753.979524] env[62096]: ERROR nova.compute.manager raise 
exception.PortBindingFailed(port_id=port['id']) [ 753.979524] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f277d93e-a15e-4bae-a5d0-cba98224d99e, please check neutron logs for more information. [ 753.979524] env[62096]: ERROR nova.compute.manager [ 753.979524] env[62096]: Traceback (most recent call last): [ 753.979524] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 753.979524] env[62096]: listener.cb(fileno) [ 753.979524] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 753.979524] env[62096]: result = function(*args, **kwargs) [ 753.979524] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 753.979524] env[62096]: return func(*args, **kwargs) [ 753.979524] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 753.979524] env[62096]: raise e [ 753.979524] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 753.979524] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 753.979524] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 753.979524] env[62096]: created_port_ids = self._update_ports_for_instance( [ 753.979524] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 753.979524] env[62096]: with excutils.save_and_reraise_exception(): [ 753.979524] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 753.979524] env[62096]: self.force_reraise() [ 753.979524] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 753.979524] env[62096]: raise self.value [ 753.979524] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 753.979524] env[62096]: updated_port = self._update_port( [ 753.979524] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 753.979524] env[62096]: _ensure_no_port_binding_failure(port) [ 753.979524] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 753.979524] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 753.980484] env[62096]: nova.exception.PortBindingFailed: Binding failed for port f277d93e-a15e-4bae-a5d0-cba98224d99e, please check neutron logs for more information. [ 753.980484] env[62096]: Removing descriptor: 16 [ 753.980484] env[62096]: ERROR nova.compute.manager [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f277d93e-a15e-4bae-a5d0-cba98224d99e, please check neutron logs for more information. 
[ 753.980484] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] Traceback (most recent call last): [ 753.980484] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 753.980484] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] yield resources [ 753.980484] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 753.980484] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] self.driver.spawn(context, instance, image_meta, [ 753.980484] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 753.980484] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 753.980484] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 753.980484] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] vm_ref = self.build_virtual_machine(instance, [ 753.980860] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 753.980860] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] vif_infos = vmwarevif.get_vif_info(self._session, [ 753.980860] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 753.980860] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] for vif in network_info: [ 753.980860] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 753.980860] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] return self._sync_wrapper(fn, *args, **kwargs) [ 753.980860] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 753.980860] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] self.wait() [ 753.980860] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 753.980860] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] self[:] = self._gt.wait() [ 753.980860] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 753.980860] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] return self._exit_event.wait() [ 753.980860] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 753.981270] env[62096]: ERROR 
nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] result = hub.switch() [ 753.981270] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 753.981270] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] return self.greenlet.switch() [ 753.981270] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 753.981270] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] result = function(*args, **kwargs) [ 753.981270] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 753.981270] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] return func(*args, **kwargs) [ 753.981270] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 753.981270] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] raise e [ 753.981270] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 753.981270] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] nwinfo = self.network_api.allocate_for_instance( [ 753.981270] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 753.981270] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] created_port_ids = self._update_ports_for_instance( [ 753.981785] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 753.981785] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] with excutils.save_and_reraise_exception(): [ 753.981785] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 753.981785] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] self.force_reraise() [ 753.981785] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 753.981785] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] raise self.value [ 753.981785] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 753.981785] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] updated_port = self._update_port( [ 753.981785] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 753.981785] 
env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] _ensure_no_port_binding_failure(port) [ 753.981785] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 753.981785] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] raise exception.PortBindingFailed(port_id=port['id']) [ 753.982182] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] nova.exception.PortBindingFailed: Binding failed for port f277d93e-a15e-4bae-a5d0-cba98224d99e, please check neutron logs for more information. [ 753.982182] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] [ 753.982182] env[62096]: INFO nova.compute.manager [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] Terminating instance [ 753.983346] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fdfc00cd491e4a199dcfc3e2d4ee0c09 [ 753.984369] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Acquiring lock "refresh_cache-7bfac3e2-f06c-4690-9215-a5f67a67c5bd" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 753.986100] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7e7d9687bdee4acb97a3534de34bdf58 [ 754.388611] env[62096]: DEBUG nova.network.neutron [req-b99e59f2-e032-4c6d-a15e-51d0981b8c83 req-a9d6a17a-f9dd-4049-b097-23a24e425b83 service nova] [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 754.431662] env[62096]: DEBUG nova.compute.utils [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 754.432394] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Expecting reply to msg 14b8a4b419ad4754960119ace2e39e3e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 754.433315] env[62096]: DEBUG nova.compute.manager [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 754.433471] env[62096]: DEBUG nova.network.neutron [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 754.442883] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 14b8a4b419ad4754960119ace2e39e3e [ 754.456400] env[62096]: DEBUG nova.network.neutron [req-b99e59f2-e032-4c6d-a15e-51d0981b8c83 req-a9d6a17a-f9dd-4049-b097-23a24e425b83 service nova] [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 754.456928] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-b99e59f2-e032-4c6d-a15e-51d0981b8c83 req-a9d6a17a-f9dd-4049-b097-23a24e425b83 service nova] Expecting reply to msg b8fc2437860945ad8945f0df78ffbf72 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 754.471845] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b8fc2437860945ad8945f0df78ffbf72 [ 754.483263] env[62096]: DEBUG nova.policy [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '74609dbea69a44a0bded6108e6c89e8d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '273ae0b5c24f4163894e1aa3d8508631', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 754.792891] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Acquiring lock "a7838568-9d47-4306-8bb6-9ad74ab1feb3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 754.793165] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Lock "a7838568-9d47-4306-8bb6-9ad74ab1feb3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 754.857003] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a94f27a4-1b37-4057-b74f-997a98e3fc05 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.860881] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1589fd7b-9c86-4b3b-af03-e38cd7cfa4bd {{(pid=62096) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.893872] env[62096]: DEBUG nova.network.neutron [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] Successfully created port: c7261eb5-f3d8-4ec4-ace3-402cb0fa8f20 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 754.896754] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52646b28-8c50-4d25-99b4-969f1913e79a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.907168] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-093b628f-63b1-4d45-836e-8dbb11848844 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.921467] env[62096]: DEBUG nova.compute.provider_tree [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 754.922173] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Expecting reply to msg bab7b6bb07184a6d9bf30bce5dbeb696 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 754.934847] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bab7b6bb07184a6d9bf30bce5dbeb696 [ 754.936620] env[62096]: DEBUG nova.compute.manager [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 754.938304] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Expecting reply to msg b571bce6b06b48a6a000018e06b7d40b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 754.961785] env[62096]: DEBUG oslo_concurrency.lockutils [req-b99e59f2-e032-4c6d-a15e-51d0981b8c83 req-a9d6a17a-f9dd-4049-b097-23a24e425b83 service nova] Releasing lock "refresh_cache-7bfac3e2-f06c-4690-9215-a5f67a67c5bd" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 754.962270] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Acquired lock "refresh_cache-7bfac3e2-f06c-4690-9215-a5f67a67c5bd" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.962451] env[62096]: DEBUG nova.network.neutron [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 754.962922] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Expecting reply to msg 138fe0cc1a0b453d85ece590c51bd5f7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 754.977188] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 138fe0cc1a0b453d85ece590c51bd5f7 [ 755.014965] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b571bce6b06b48a6a000018e06b7d40b [ 755.425181] env[62096]: DEBUG nova.scheduler.client.report [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 755.427591] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Expecting reply to msg 5dc14923b5934617a562e5c205ff3f6e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 755.439355] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5dc14923b5934617a562e5c205ff3f6e [ 755.441975] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Expecting reply to msg 
2d40d7d004d94cf59243d73afb22d993 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 755.478896] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2d40d7d004d94cf59243d73afb22d993 [ 755.616460] env[62096]: DEBUG nova.network.neutron [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 755.738868] env[62096]: DEBUG nova.network.neutron [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 755.739381] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Expecting reply to msg f613b9ec8d27488988229d9bbef04b6b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 755.748343] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f613b9ec8d27488988229d9bbef04b6b [ 755.930341] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.003s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 755.930984] env[62096]: ERROR nova.compute.manager [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e1e9548f-8e05-4826-85a0-6602fd03e638, please check neutron logs for more information. 
[ 755.930984] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] Traceback (most recent call last): [ 755.930984] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 755.930984] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] self.driver.spawn(context, instance, image_meta, [ 755.930984] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 755.930984] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 755.930984] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 755.930984] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] vm_ref = self.build_virtual_machine(instance, [ 755.930984] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 755.930984] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] vif_infos = vmwarevif.get_vif_info(self._session, [ 755.930984] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 755.931374] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] for vif in network_info: [ 755.931374] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 755.931374] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] return self._sync_wrapper(fn, *args, **kwargs) [ 755.931374] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 755.931374] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] self.wait() [ 755.931374] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 755.931374] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] self[:] = self._gt.wait() [ 755.931374] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 755.931374] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] return self._exit_event.wait() [ 755.931374] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 755.931374] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] result = hub.switch() [ 755.931374] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
755.931374] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] return self.greenlet.switch() [ 755.931814] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 755.931814] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] result = function(*args, **kwargs) [ 755.931814] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 755.931814] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] return func(*args, **kwargs) [ 755.931814] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 755.931814] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] raise e [ 755.931814] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 755.931814] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] nwinfo = self.network_api.allocate_for_instance( [ 755.931814] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 755.931814] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] created_port_ids = self._update_ports_for_instance( [ 755.931814] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 755.931814] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] with excutils.save_and_reraise_exception(): [ 755.931814] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 755.932273] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] self.force_reraise() [ 755.932273] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 755.932273] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] raise self.value [ 755.932273] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 755.932273] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] updated_port = self._update_port( [ 755.932273] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 755.932273] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] _ensure_no_port_binding_failure(port) [ 755.932273] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 755.932273] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] raise exception.PortBindingFailed(port_id=port['id']) [ 755.932273] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] nova.exception.PortBindingFailed: Binding failed for port e1e9548f-8e05-4826-85a0-6602fd03e638, please check neutron logs for more information. [ 755.932273] env[62096]: ERROR nova.compute.manager [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] [ 755.932653] env[62096]: DEBUG nova.compute.utils [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] Binding failed for port e1e9548f-8e05-4826-85a0-6602fd03e638, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 755.933827] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.267s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 755.936284] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Expecting reply to msg 451e16f6e6fb48dca249ce92c8053e32 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 755.946351] env[62096]: DEBUG nova.compute.manager [req-fba5ab53-7277-4221-8b9f-fb6d546b2c07 req-51b32c0e-8ef3-48df-a45d-a6faad421b93 service nova] [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] Received event network-vif-deleted-f277d93e-a15e-4bae-a5d0-cba98224d99e {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 755.946839] env[62096]: DEBUG nova.compute.manager [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] Build of instance 58df043b-ab2f-4e78-8bba-084fe53d3d8e was re-scheduled: Binding failed for port e1e9548f-8e05-4826-85a0-6602fd03e638, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 755.947308] env[62096]: DEBUG nova.compute.manager [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 755.947539] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Acquiring lock "refresh_cache-58df043b-ab2f-4e78-8bba-084fe53d3d8e" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 755.947694] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Acquired lock "refresh_cache-58df043b-ab2f-4e78-8bba-084fe53d3d8e" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 755.947854] env[62096]: DEBUG nova.network.neutron [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 755.948299] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Expecting reply to msg ecb2c58cca194236b756f11f6d1d4491 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 755.954067] env[62096]: DEBUG nova.compute.manager [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 755.964478] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ecb2c58cca194236b756f11f6d1d4491 [ 755.972957] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 451e16f6e6fb48dca249ce92c8053e32 [ 755.988930] env[62096]: DEBUG nova.virt.hardware [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 755.988930] env[62096]: DEBUG nova.virt.hardware [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 755.988930] env[62096]: DEBUG nova.virt.hardware [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 755.989101] env[62096]: DEBUG nova.virt.hardware [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 755.989101] env[62096]: DEBUG nova.virt.hardware [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 755.989101] env[62096]: DEBUG nova.virt.hardware [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 755.989101] env[62096]: DEBUG nova.virt.hardware [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 755.989101] 
env[62096]: DEBUG nova.virt.hardware [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 755.989285] env[62096]: DEBUG nova.virt.hardware [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 755.989285] env[62096]: DEBUG nova.virt.hardware [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 755.989356] env[62096]: DEBUG nova.virt.hardware [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 755.990530] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c81c781-7492-4e50-9acd-c03cad45a532 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.998464] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bce308bb-eb6c-4bb9-8efd-c5878e5032d5 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.241484] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Releasing lock "refresh_cache-7bfac3e2-f06c-4690-9215-a5f67a67c5bd" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 756.242006] env[62096]: DEBUG nova.compute.manager [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 756.242268] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 756.242561] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a2c517ab-d02b-42e4-92e6-eba72658cd50 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.252368] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35b6079f-ef22-4554-9620-076ea8e716d7 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.273399] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7bfac3e2-f06c-4690-9215-a5f67a67c5bd could not be found. [ 756.273623] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 756.273807] env[62096]: INFO nova.compute.manager [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] Took 0.03 seconds to destroy the instance on the hypervisor. [ 756.274103] env[62096]: DEBUG oslo.service.loopingcall [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 756.274309] env[62096]: DEBUG nova.compute.manager [-] [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 756.274403] env[62096]: DEBUG nova.network.neutron [-] [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 756.296542] env[62096]: DEBUG nova.network.neutron [-] [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 756.297026] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 7490a3fbd8a04a10968e803ad24893d7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 756.303856] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7490a3fbd8a04a10968e803ad24893d7 [ 756.484193] env[62096]: ERROR nova.compute.manager [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c7261eb5-f3d8-4ec4-ace3-402cb0fa8f20, please check neutron logs for more information. [ 756.484193] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 756.484193] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 756.484193] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 756.484193] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 756.484193] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 756.484193] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 756.484193] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 756.484193] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 756.484193] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 756.484193] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 756.484193] env[62096]: ERROR nova.compute.manager raise self.value [ 756.484193] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 756.484193] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 756.484193] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 756.484193] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 756.484886] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 756.484886] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 756.484886] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c7261eb5-f3d8-4ec4-ace3-402cb0fa8f20, please check neutron logs for more information. 
[ 756.484886] env[62096]: ERROR nova.compute.manager [ 756.484886] env[62096]: Traceback (most recent call last): [ 756.484886] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 756.484886] env[62096]: listener.cb(fileno) [ 756.484886] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 756.484886] env[62096]: result = function(*args, **kwargs) [ 756.484886] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 756.484886] env[62096]: return func(*args, **kwargs) [ 756.484886] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 756.484886] env[62096]: raise e [ 756.484886] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 756.484886] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 756.484886] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 756.484886] env[62096]: created_port_ids = self._update_ports_for_instance( [ 756.484886] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 756.484886] env[62096]: with excutils.save_and_reraise_exception(): [ 756.484886] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 756.484886] env[62096]: self.force_reraise() [ 756.484886] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 756.484886] env[62096]: raise self.value [ 756.484886] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 756.484886] env[62096]: updated_port = self._update_port( [ 756.484886] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 756.484886] env[62096]: _ensure_no_port_binding_failure(port) [ 756.484886] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 756.484886] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 756.485727] env[62096]: nova.exception.PortBindingFailed: Binding failed for port c7261eb5-f3d8-4ec4-ace3-402cb0fa8f20, please check neutron logs for more information. [ 756.485727] env[62096]: Removing descriptor: 19 [ 756.485793] env[62096]: ERROR nova.compute.manager [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c7261eb5-f3d8-4ec4-ace3-402cb0fa8f20, please check neutron logs for more information. 
[ 756.485793] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] Traceback (most recent call last): [ 756.485793] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 756.485793] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] yield resources [ 756.485793] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 756.485793] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] self.driver.spawn(context, instance, image_meta, [ 756.485793] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 756.485793] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 756.485793] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 756.485793] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] vm_ref = self.build_virtual_machine(instance, [ 756.485793] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 756.486114] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] vif_infos = vmwarevif.get_vif_info(self._session, [ 756.486114] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 756.486114] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] for vif in network_info: [ 756.486114] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 756.486114] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] return self._sync_wrapper(fn, *args, **kwargs) [ 756.486114] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 756.486114] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] self.wait() [ 756.486114] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 756.486114] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] self[:] = self._gt.wait() [ 756.486114] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 756.486114] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] return self._exit_event.wait() [ 756.486114] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 756.486114] env[62096]: ERROR 
nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] result = hub.switch() [ 756.486500] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 756.486500] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] return self.greenlet.switch() [ 756.486500] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 756.486500] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] result = function(*args, **kwargs) [ 756.486500] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 756.486500] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] return func(*args, **kwargs) [ 756.486500] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 756.486500] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] raise e [ 756.486500] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 756.486500] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] nwinfo = self.network_api.allocate_for_instance( [ 756.486500] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 756.486500] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] created_port_ids = self._update_ports_for_instance( [ 756.486500] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 756.486896] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] with excutils.save_and_reraise_exception(): [ 756.486896] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 756.486896] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] self.force_reraise() [ 756.486896] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 756.486896] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] raise self.value [ 756.486896] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 756.486896] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] updated_port = self._update_port( [ 756.486896] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 756.486896] 
env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] _ensure_no_port_binding_failure(port) [ 756.486896] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 756.486896] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] raise exception.PortBindingFailed(port_id=port['id']) [ 756.486896] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] nova.exception.PortBindingFailed: Binding failed for port c7261eb5-f3d8-4ec4-ace3-402cb0fa8f20, please check neutron logs for more information. [ 756.486896] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] [ 756.487332] env[62096]: INFO nova.compute.manager [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] Terminating instance [ 756.488910] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Acquiring lock "refresh_cache-fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 756.489190] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Acquired lock "refresh_cache-fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.489535] env[62096]: DEBUG nova.network.neutron [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 756.490208] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Expecting reply to msg 5f44819a1b0a4e269465a3ef86728e4d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 756.495712] env[62096]: DEBUG nova.network.neutron [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 756.504020] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5f44819a1b0a4e269465a3ef86728e4d [ 756.582646] env[62096]: DEBUG nova.network.neutron [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.583146] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Expecting reply to msg a3b12fbef92e4e9e8f96ec661f8b683b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 756.591248] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a3b12fbef92e4e9e8f96ec661f8b683b [ 756.778460] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d47025e3-b9a7-4ff4-9dff-c2b58d5f2bea {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.785954] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-398006db-9e82-4818-9899-561ad94499f8 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.816378] env[62096]: DEBUG nova.network.neutron [-] [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.816825] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 68f8fa9f088a441ca5436579b4c68b47 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 756.824242] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d403ec48-75d5-49de-bed0-390c5eb9ef76 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.825635] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 68f8fa9f088a441ca5436579b4c68b47 [ 756.826947] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f2d1e91-63bc-49be-8c6d-0b70611c23e2 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.840141] env[62096]: DEBUG nova.compute.provider_tree [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 756.840626] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Expecting reply to msg b03ed0465dd546e39c34a2fe77f33b72 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 756.847686] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
b03ed0465dd546e39c34a2fe77f33b72 [ 757.024207] env[62096]: DEBUG nova.network.neutron [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 757.085669] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Releasing lock "refresh_cache-58df043b-ab2f-4e78-8bba-084fe53d3d8e" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 757.086040] env[62096]: DEBUG nova.compute.manager [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 757.086304] env[62096]: DEBUG nova.compute.manager [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 757.086533] env[62096]: DEBUG nova.network.neutron [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 757.104180] env[62096]: DEBUG nova.network.neutron [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 757.104808] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Expecting reply to msg b8b42dd1ebc74e61b316c8cb0d6ba946 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 757.111707] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b8b42dd1ebc74e61b316c8cb0d6ba946 [ 757.147732] env[62096]: DEBUG nova.network.neutron [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 757.148261] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Expecting reply to msg d55886c613b74321a4152ffcc20460d1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 757.156735] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d55886c613b74321a4152ffcc20460d1 [ 757.323085] env[62096]: INFO nova.compute.manager [-] [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] Took 1.05 seconds to deallocate network for instance. [ 757.325612] env[62096]: DEBUG nova.compute.claims [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 757.325791] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 757.342811] env[62096]: DEBUG nova.scheduler.client.report [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 757.345352] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Expecting reply to msg 541f89a8bca3458e93b80579342f95ef in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 757.363863] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 541f89a8bca3458e93b80579342f95ef [ 757.608506] env[62096]: 
DEBUG nova.network.neutron [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 757.609527] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Expecting reply to msg f0766d412bf540d69e20ee6a08515b38 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 757.618552] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f0766d412bf540d69e20ee6a08515b38 [ 757.650266] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Releasing lock "refresh_cache-fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 757.650656] env[62096]: DEBUG nova.compute.manager [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 757.650848] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 757.651345] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-97efa7d3-bf90-4573-9e22-52f39ac74197 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.661286] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80b09a25-7317-426a-9e11-a67da4a4ec1d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.681536] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c could not be found. 
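
Note (not part of the captured log): the PortBindingFailed tracebacks above all terminate in nova/network/neutron.py's _ensure_no_port_binding_failure. The snippet below is a minimal illustrative sketch of that check, assuming the standard Neutron port-binding field 'binding:vif_type' and its 'binding_failed' sentinel; it is not a quote of the Nova source running in this environment.

    # Illustrative sketch of the check the tracebacks above end in.
    # Field name 'binding:vif_type' and the 'binding_failed' sentinel are
    # assumptions based on Neutron's port-binding extension.

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")


    def ensure_no_port_binding_failure(port: dict) -> None:
        # Neutron marks a failed binding by setting the port's vif_type to a
        # failure sentinel; Nova converts that into an exception so the build
        # is aborted and the instance can be re-scheduled.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])


    if __name__ == '__main__':
        port = {'id': 'c7261eb5-f3d8-4ec4-ace3-402cb0fa8f20',
                'binding:vif_type': 'binding_failed'}
        try:
            ensure_no_port_binding_failure(port)
        except PortBindingFailed as exc:
            print(exc)  # mirrors the message recorded in the log above
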
[ 757.681696] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 757.681869] env[62096]: INFO nova.compute.manager [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] Took 0.03 seconds to destroy the instance on the hypervisor. [ 757.682101] env[62096]: DEBUG oslo.service.loopingcall [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 757.682309] env[62096]: DEBUG nova.compute.manager [-] [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 757.682403] env[62096]: DEBUG nova.network.neutron [-] [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 757.696024] env[62096]: DEBUG nova.network.neutron [-] [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 757.696458] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg ba2c4f351f0d4ce3851518b694e2e36b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 757.702716] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ba2c4f351f0d4ce3851518b694e2e36b [ 757.851462] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.917s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 757.852304] env[62096]: ERROR nova.compute.manager [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 321ab95b-6221-4bab-b442-a90926098dae] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c0890e55-c21e-40bc-b223-5d06636aba10, please check neutron logs for more information. 
[ 757.852304] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] Traceback (most recent call last): [ 757.852304] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 757.852304] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] self.driver.spawn(context, instance, image_meta, [ 757.852304] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 757.852304] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] self._vmops.spawn(context, instance, image_meta, injected_files, [ 757.852304] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 757.852304] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] vm_ref = self.build_virtual_machine(instance, [ 757.852304] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 757.852304] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] vif_infos = vmwarevif.get_vif_info(self._session, [ 757.852304] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 757.852741] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] for vif in network_info: [ 757.852741] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 757.852741] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] return self._sync_wrapper(fn, *args, **kwargs) [ 757.852741] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 757.852741] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] self.wait() [ 757.852741] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 757.852741] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] self[:] = self._gt.wait() [ 757.852741] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 757.852741] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] return self._exit_event.wait() [ 757.852741] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 757.852741] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] result = hub.switch() [ 757.852741] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
757.852741] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] return self.greenlet.switch() [ 757.853136] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 757.853136] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] result = function(*args, **kwargs) [ 757.853136] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 757.853136] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] return func(*args, **kwargs) [ 757.853136] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 757.853136] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] raise e [ 757.853136] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 757.853136] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] nwinfo = self.network_api.allocate_for_instance( [ 757.853136] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 757.853136] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] created_port_ids = self._update_ports_for_instance( [ 757.853136] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 757.853136] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] with excutils.save_and_reraise_exception(): [ 757.853136] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 757.853536] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] self.force_reraise() [ 757.853536] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 757.853536] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] raise self.value [ 757.853536] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 757.853536] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] updated_port = self._update_port( [ 757.853536] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 757.853536] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] _ensure_no_port_binding_failure(port) [ 757.853536] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 757.853536] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] raise exception.PortBindingFailed(port_id=port['id']) [ 757.853536] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] nova.exception.PortBindingFailed: Binding failed for port c0890e55-c21e-40bc-b223-5d06636aba10, please check neutron logs for more information. [ 757.853536] env[62096]: ERROR nova.compute.manager [instance: 321ab95b-6221-4bab-b442-a90926098dae] [ 757.853860] env[62096]: DEBUG nova.compute.utils [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 321ab95b-6221-4bab-b442-a90926098dae] Binding failed for port c0890e55-c21e-40bc-b223-5d06636aba10, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 757.854929] env[62096]: DEBUG oslo_concurrency.lockutils [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.872s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 757.856502] env[62096]: INFO nova.compute.claims [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 757.858163] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Expecting reply to msg ae0599cdc3b24a1fb558797b5803a367 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 757.859499] env[62096]: DEBUG nova.compute.manager [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 321ab95b-6221-4bab-b442-a90926098dae] Build of instance 321ab95b-6221-4bab-b442-a90926098dae was re-scheduled: Binding failed for port c0890e55-c21e-40bc-b223-5d06636aba10, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 757.859954] env[62096]: DEBUG nova.compute.manager [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 321ab95b-6221-4bab-b442-a90926098dae] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 757.860167] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Acquiring lock "refresh_cache-321ab95b-6221-4bab-b442-a90926098dae" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 757.860308] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Acquired lock "refresh_cache-321ab95b-6221-4bab-b442-a90926098dae" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.860464] env[62096]: DEBUG nova.network.neutron [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 321ab95b-6221-4bab-b442-a90926098dae] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 757.860821] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Expecting reply to msg 3c355aa911b24b57b66d2864f061e6ec in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 757.867273] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c355aa911b24b57b66d2864f061e6ec [ 757.907173] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ae0599cdc3b24a1fb558797b5803a367 [ 758.021738] env[62096]: DEBUG nova.compute.manager [req-aa086ef8-13b4-4416-b411-6e712ff57eaf req-ade043ce-2717-469c-a8bb-a227ebd9fb9f service nova] [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] Received event network-changed-c7261eb5-f3d8-4ec4-ace3-402cb0fa8f20 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 758.021963] env[62096]: DEBUG nova.compute.manager [req-aa086ef8-13b4-4416-b411-6e712ff57eaf req-ade043ce-2717-469c-a8bb-a227ebd9fb9f service nova] [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] Refreshing instance network info cache due to event network-changed-c7261eb5-f3d8-4ec4-ace3-402cb0fa8f20. 
{{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 758.022137] env[62096]: DEBUG oslo_concurrency.lockutils [req-aa086ef8-13b4-4416-b411-6e712ff57eaf req-ade043ce-2717-469c-a8bb-a227ebd9fb9f service nova] Acquiring lock "refresh_cache-fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 758.022278] env[62096]: DEBUG oslo_concurrency.lockutils [req-aa086ef8-13b4-4416-b411-6e712ff57eaf req-ade043ce-2717-469c-a8bb-a227ebd9fb9f service nova] Acquired lock "refresh_cache-fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.022431] env[62096]: DEBUG nova.network.neutron [req-aa086ef8-13b4-4416-b411-6e712ff57eaf req-ade043ce-2717-469c-a8bb-a227ebd9fb9f service nova] [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] Refreshing network info cache for port c7261eb5-f3d8-4ec4-ace3-402cb0fa8f20 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 758.022894] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-aa086ef8-13b4-4416-b411-6e712ff57eaf req-ade043ce-2717-469c-a8bb-a227ebd9fb9f service nova] Expecting reply to msg 0bcd793f9f1246cabe598ccb1b680284 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 758.030097] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0bcd793f9f1246cabe598ccb1b680284 [ 758.111735] env[62096]: INFO nova.compute.manager [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] [instance: 58df043b-ab2f-4e78-8bba-084fe53d3d8e] Took 1.03 seconds to deallocate network for instance. 
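
Note (not part of the captured log): the scheduler report client above states that inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 and lists totals, reservations, and allocation ratios per resource class. The sketch below shows the usable capacity implied by that inventory, using the standard Placement formula (total - reserved) * allocation_ratio; the helper itself is illustrative and not code from this deployment.

    # Illustrative sketch: usable capacity derived from the inventory data
    # reported in the log (VCPU, MEMORY_MB, DISK_GB for this provider).

    INVENTORY = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }


    def usable_capacity(inv: dict) -> float:
        # Capacity Placement will allow allocations against for this class.
        return (inv['total'] - inv['reserved']) * inv['allocation_ratio']


    for resource_class, inv in INVENTORY.items():
        print(resource_class, usable_capacity(inv))
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
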
[ 758.115832] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Expecting reply to msg cae1cb8384b743b1a58fd362e75baaaa in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 758.157942] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cae1cb8384b743b1a58fd362e75baaaa [ 758.198716] env[62096]: DEBUG nova.network.neutron [-] [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.199203] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg cccfa6e1a60e4508a65f04481579c513 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 758.207447] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cccfa6e1a60e4508a65f04481579c513 [ 758.364180] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Expecting reply to msg f1049797022940e9a610cb58c41748a3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 758.371960] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f1049797022940e9a610cb58c41748a3 [ 758.377890] env[62096]: DEBUG nova.network.neutron [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 321ab95b-6221-4bab-b442-a90926098dae] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 758.461895] env[62096]: DEBUG nova.network.neutron [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 321ab95b-6221-4bab-b442-a90926098dae] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.462575] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Expecting reply to msg f42e0086363a484598e397a32cc9ce47 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 758.471866] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f42e0086363a484598e397a32cc9ce47 [ 758.543159] env[62096]: DEBUG nova.network.neutron [req-aa086ef8-13b4-4416-b411-6e712ff57eaf req-ade043ce-2717-469c-a8bb-a227ebd9fb9f service nova] [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 758.617690] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Expecting reply to msg 31a25b1c65b843a4b233129e156912da in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 758.628830] env[62096]: DEBUG nova.network.neutron [req-aa086ef8-13b4-4416-b411-6e712ff57eaf req-ade043ce-2717-469c-a8bb-a227ebd9fb9f service nova] [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.629296] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-aa086ef8-13b4-4416-b411-6e712ff57eaf req-ade043ce-2717-469c-a8bb-a227ebd9fb9f service nova] Expecting reply to msg bbb6cf0c50454d6c9fd7f9b0c004199c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 758.637345] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bbb6cf0c50454d6c9fd7f9b0c004199c [ 758.651692] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 31a25b1c65b843a4b233129e156912da [ 758.701563] env[62096]: INFO nova.compute.manager [-] [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] Took 1.02 seconds to deallocate network for instance. [ 758.703757] env[62096]: DEBUG nova.compute.claims [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 758.703896] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 758.965496] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Releasing lock "refresh_cache-321ab95b-6221-4bab-b442-a90926098dae" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 758.965736] env[62096]: DEBUG nova.compute.manager [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 758.965916] env[62096]: DEBUG nova.compute.manager [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 321ab95b-6221-4bab-b442-a90926098dae] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 758.966085] env[62096]: DEBUG nova.network.neutron [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 321ab95b-6221-4bab-b442-a90926098dae] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 758.995696] env[62096]: DEBUG nova.network.neutron [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 321ab95b-6221-4bab-b442-a90926098dae] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 758.996179] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Expecting reply to msg 204f593e849a491fb6a56cd3ea53bf24 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 759.004360] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 204f593e849a491fb6a56cd3ea53bf24 [ 759.132776] env[62096]: DEBUG oslo_concurrency.lockutils [req-aa086ef8-13b4-4416-b411-6e712ff57eaf req-ade043ce-2717-469c-a8bb-a227ebd9fb9f service nova] Releasing lock "refresh_cache-fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 759.133061] env[62096]: DEBUG nova.compute.manager [req-aa086ef8-13b4-4416-b411-6e712ff57eaf req-ade043ce-2717-469c-a8bb-a227ebd9fb9f service nova] [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] Received event network-vif-deleted-c7261eb5-f3d8-4ec4-ace3-402cb0fa8f20 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 759.146258] env[62096]: INFO nova.scheduler.client.report [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Deleted allocations for instance 58df043b-ab2f-4e78-8bba-084fe53d3d8e [ 759.154404] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Expecting reply to msg e0abef31595d400a8f6213b7c08b7708 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 759.175518] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e0abef31595d400a8f6213b7c08b7708 [ 759.267050] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9906521-c24e-43e1-92a7-dd7d1d5b0198 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.278081] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24e6741e-42f3-4e0b-903b-bb127af1f229 {{(pid=62096) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.310913] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd26c02b-a681-4d09-8a96-03de9444f769 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.318786] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0664fa7c-03e3-4711-8fcd-4e88cd22f70f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.332428] env[62096]: DEBUG nova.compute.provider_tree [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 759.332923] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Expecting reply to msg 9a314deb3d8243189fe52e6263483066 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 759.340851] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9a314deb3d8243189fe52e6263483066 [ 759.498124] env[62096]: DEBUG nova.network.neutron [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 321ab95b-6221-4bab-b442-a90926098dae] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 759.498810] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Expecting reply to msg 63b149a377844e99bf513cb3d5db3c3d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 759.507676] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 63b149a377844e99bf513cb3d5db3c3d [ 759.665917] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d841432a-bb6a-4a14-a0d1-d583a8a5e9c6 tempest-ServerAddressesNegativeTestJSON-1360856578 tempest-ServerAddressesNegativeTestJSON-1360856578-project-member] Lock "58df043b-ab2f-4e78-8bba-084fe53d3d8e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 158.354s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 759.666992] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 02fc4558649b437b9988a4911462545b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 759.676402] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 02fc4558649b437b9988a4911462545b [ 759.836415] env[62096]: DEBUG nova.scheduler.client.report [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: 
{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 759.839175] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Expecting reply to msg 6a45315b549e4b9893edbbda2f00c0c4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 759.851396] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6a45315b549e4b9893edbbda2f00c0c4 [ 760.004471] env[62096]: INFO nova.compute.manager [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] [instance: 321ab95b-6221-4bab-b442-a90926098dae] Took 1.03 seconds to deallocate network for instance. [ 760.004471] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Expecting reply to msg 2315f325032f4ccaade466ba64603da5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 760.045610] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2315f325032f4ccaade466ba64603da5 [ 760.172310] env[62096]: DEBUG nova.compute.manager [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 760.172310] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 66dab74f122844efbf3ec75c6378660d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 760.203126] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 66dab74f122844efbf3ec75c6378660d [ 760.348033] env[62096]: DEBUG oslo_concurrency.lockutils [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.487s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 760.348033] env[62096]: DEBUG nova.compute.manager [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 760.348033] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Expecting reply to msg 5604d2f8e8484bbcb21a0b08382b9088 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 760.348033] env[62096]: DEBUG oslo_concurrency.lockutils [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.528s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 760.348033] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Expecting reply to msg 0c1fbafe9a3d4359a569fc53ba979062 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 760.383730] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5604d2f8e8484bbcb21a0b08382b9088 [ 760.393522] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c1fbafe9a3d4359a569fc53ba979062 [ 760.508871] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Expecting reply to msg 1c33f19a69e64da386877e017d03d195 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 760.556676] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1c33f19a69e64da386877e017d03d195 [ 760.704759] env[62096]: DEBUG oslo_concurrency.lockutils [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 760.857217] env[62096]: DEBUG nova.compute.utils [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 760.860374] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Expecting reply to msg d604002f714a44ab80ea7dd00aeb7222 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 760.861044] env[62096]: DEBUG nova.compute.manager [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 760.861103] env[62096]: DEBUG nova.network.neutron [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 760.867807] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d604002f714a44ab80ea7dd00aeb7222 [ 760.930047] env[62096]: DEBUG nova.policy [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '31140eb3c85f4ad9a37461a052bf1c0b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '52cd4fa24de5421eb019ce25d3cb3c79', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 761.032222] env[62096]: INFO nova.scheduler.client.report [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Deleted allocations for instance 321ab95b-6221-4bab-b442-a90926098dae [ 761.039461] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Expecting reply to msg fb8e6c6d5f614ffc9e6011afe28f6a28 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 761.054647] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fb8e6c6d5f614ffc9e6011afe28f6a28 [ 761.234319] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78abdd21-f5a8-417c-a226-bc1b2863225e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.242388] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-678a6a2f-f06e-49fa-90fc-392581ed5114 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.272244] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d144c551-c1d8-4444-97eb-492b4634ebcb {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.279549] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-514bd16f-a4eb-47aa-90ff-91d0b9fe6057 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.293447] env[62096]: DEBUG nova.compute.provider_tree [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 761.294044] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Expecting reply to msg e64bd66843194de7ae7828228d1385f3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 761.302405] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e64bd66843194de7ae7828228d1385f3 [ 761.365119] env[62096]: DEBUG nova.compute.manager [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] Start building block device mappings for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 761.366105] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Expecting reply to msg 9d2e31d560a24a58a4bfd454305b68f9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 761.423362] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9d2e31d560a24a58a4bfd454305b68f9 [ 761.541518] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c8a8569a-9a38-4dba-b714-875ef3e36f76 tempest-SecurityGroupsTestJSON-1657304550 tempest-SecurityGroupsTestJSON-1657304550-project-member] Lock "321ab95b-6221-4bab-b442-a90926098dae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 154.443s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 761.542153] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Expecting reply to msg 739931b6c8e8421fa82389fb1684ded1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 761.552070] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 739931b6c8e8421fa82389fb1684ded1 [ 761.814754] env[62096]: DEBUG nova.network.neutron [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] Successfully created port: 8f78fa6f-374c-4405-b1b5-d31f92a1df08 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 761.815919] env[62096]: ERROR nova.scheduler.client.report [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] [req-b75faebe-b1ed-4a4d-84b2-94e9457337f5] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 
1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 6eefe13c-ab55-4c03-987f-47a62756c3b3. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-b75faebe-b1ed-4a4d-84b2-94e9457337f5"}]}: nova.exception.PortBindingFailed: Binding failed for port 479cc0d6-65e1-4c95-a612-c8b87a7f1f27, please check neutron logs for more information. [ 761.836704] env[62096]: DEBUG nova.scheduler.client.report [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Refreshing inventories for resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 761.861749] env[62096]: DEBUG nova.scheduler.client.report [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Updating ProviderTree inventory for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 761.862091] env[62096]: DEBUG nova.compute.provider_tree [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 761.875498] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Expecting reply to msg 5a69419bbf7049bfb856137ef8ee4dc9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 761.877683] env[62096]: DEBUG nova.scheduler.client.report [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Refreshing aggregate associations for resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3, aggregates: None {{(pid=62096) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 761.912606] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5a69419bbf7049bfb856137ef8ee4dc9 [ 761.914446] env[62096]: DEBUG nova.scheduler.client.report [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 
tempest-AttachInterfacesUnderV243Test-218968522-project-member] Refreshing trait associations for resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62096) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 762.044162] env[62096]: DEBUG nova.compute.manager [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] [instance: e8631ec4-1823-46d2-8553-05e3336fed32] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 762.047031] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Expecting reply to msg d83c6ae6155f4b16bb149b79445c8a75 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 762.088771] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d83c6ae6155f4b16bb149b79445c8a75 [ 762.304504] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d5404a6-bf86-4185-8278-c4211c400734 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.312467] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-651ed869-068f-40e5-a529-eb40821d1b5c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.350495] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33d7cdf6-11f3-4593-8783-6c37fbbda421 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.358149] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-632e009d-b172-49aa-8196-5e90e6eb7409 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.374530] env[62096]: DEBUG nova.compute.provider_tree [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 762.375155] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Expecting reply to msg f882046cabcb4a818114c66760286c4c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 762.381445] env[62096]: DEBUG nova.compute.manager [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] 
[instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] Start spawning the instance on the hypervisor. {{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 762.384780] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f882046cabcb4a818114c66760286c4c [ 762.406033] env[62096]: DEBUG nova.virt.hardware [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 762.406262] env[62096]: DEBUG nova.virt.hardware [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 762.406411] env[62096]: DEBUG nova.virt.hardware [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 762.406651] env[62096]: DEBUG nova.virt.hardware [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 762.406815] env[62096]: DEBUG nova.virt.hardware [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 762.406960] env[62096]: DEBUG nova.virt.hardware [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 762.407403] env[62096]: DEBUG nova.virt.hardware [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 762.407570] env[62096]: DEBUG nova.virt.hardware [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 
tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 762.407735] env[62096]: DEBUG nova.virt.hardware [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 762.407893] env[62096]: DEBUG nova.virt.hardware [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 762.408081] env[62096]: DEBUG nova.virt.hardware [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 762.408930] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a14f7b6-5bb5-4ddd-a08e-50c485e4ce2b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.416800] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8e3d8dc-60c9-498c-b285-83f22567e0f3 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.578902] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 762.908622] env[62096]: DEBUG nova.scheduler.client.report [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Updated inventory for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with generation 78 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:957}} [ 762.908903] env[62096]: DEBUG nova.compute.provider_tree [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Updating resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 generation from 78 to 79 during operation: update_inventory {{(pid=62096) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 762.909041] env[62096]: DEBUG nova.compute.provider_tree [None 
req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 762.911587] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Expecting reply to msg f65b41113c1a4100a1b6f0c189c84ce5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 762.926253] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f65b41113c1a4100a1b6f0c189c84ce5 [ 763.412596] env[62096]: ERROR nova.compute.manager [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 8f78fa6f-374c-4405-b1b5-d31f92a1df08, please check neutron logs for more information. [ 763.412596] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 763.412596] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 763.412596] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 763.412596] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 763.412596] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 763.412596] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 763.412596] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 763.412596] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 763.412596] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 763.412596] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 763.412596] env[62096]: ERROR nova.compute.manager raise self.value [ 763.412596] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 763.412596] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 763.412596] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 763.412596] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 763.413002] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 763.413002] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 763.413002] env[62096]: ERROR 
nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 8f78fa6f-374c-4405-b1b5-d31f92a1df08, please check neutron logs for more information. [ 763.413002] env[62096]: ERROR nova.compute.manager [ 763.413002] env[62096]: Traceback (most recent call last): [ 763.413002] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 763.413002] env[62096]: listener.cb(fileno) [ 763.413002] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 763.413002] env[62096]: result = function(*args, **kwargs) [ 763.413002] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 763.413002] env[62096]: return func(*args, **kwargs) [ 763.413002] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 763.413002] env[62096]: raise e [ 763.413002] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 763.413002] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 763.413002] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 763.413002] env[62096]: created_port_ids = self._update_ports_for_instance( [ 763.413002] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 763.413002] env[62096]: with excutils.save_and_reraise_exception(): [ 763.413002] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 763.413002] env[62096]: self.force_reraise() [ 763.413002] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 763.413002] env[62096]: raise self.value [ 763.413002] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 763.413002] env[62096]: updated_port = self._update_port( [ 763.413002] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 763.413002] env[62096]: _ensure_no_port_binding_failure(port) [ 763.413002] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 763.413002] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 763.413724] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 8f78fa6f-374c-4405-b1b5-d31f92a1df08, please check neutron logs for more information. [ 763.413724] env[62096]: Removing descriptor: 16 [ 763.413724] env[62096]: ERROR nova.compute.manager [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 8f78fa6f-374c-4405-b1b5-d31f92a1df08, please check neutron logs for more information. 
[ 763.413724] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] Traceback (most recent call last): [ 763.413724] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 763.413724] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] yield resources [ 763.413724] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 763.413724] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] self.driver.spawn(context, instance, image_meta, [ 763.413724] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 763.413724] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] self._vmops.spawn(context, instance, image_meta, injected_files, [ 763.413724] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 763.413724] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] vm_ref = self.build_virtual_machine(instance, [ 763.414138] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 763.414138] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] vif_infos = vmwarevif.get_vif_info(self._session, [ 763.414138] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 763.414138] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] for vif in network_info: [ 763.414138] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 763.414138] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] return self._sync_wrapper(fn, *args, **kwargs) [ 763.414138] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 763.414138] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] self.wait() [ 763.414138] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 763.414138] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] self[:] = self._gt.wait() [ 763.414138] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 763.414138] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] return self._exit_event.wait() [ 763.414138] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 763.414512] env[62096]: ERROR 
nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] result = hub.switch() [ 763.414512] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 763.414512] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] return self.greenlet.switch() [ 763.414512] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 763.414512] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] result = function(*args, **kwargs) [ 763.414512] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 763.414512] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] return func(*args, **kwargs) [ 763.414512] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 763.414512] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] raise e [ 763.414512] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 763.414512] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] nwinfo = self.network_api.allocate_for_instance( [ 763.414512] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 763.414512] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] created_port_ids = self._update_ports_for_instance( [ 763.414871] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 763.414871] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] with excutils.save_and_reraise_exception(): [ 763.414871] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 763.414871] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] self.force_reraise() [ 763.414871] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 763.414871] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] raise self.value [ 763.414871] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 763.414871] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] updated_port = self._update_port( [ 763.414871] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 763.414871] 
env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] _ensure_no_port_binding_failure(port) [ 763.414871] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 763.414871] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] raise exception.PortBindingFailed(port_id=port['id']) [ 763.415193] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] nova.exception.PortBindingFailed: Binding failed for port 8f78fa6f-374c-4405-b1b5-d31f92a1df08, please check neutron logs for more information. [ 763.415193] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] [ 763.415193] env[62096]: INFO nova.compute.manager [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] Terminating instance [ 763.417729] env[62096]: DEBUG oslo_concurrency.lockutils [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Acquiring lock "refresh_cache-510189aa-204c-4fd6-90d5-47a7ce5f7630" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 763.417729] env[62096]: DEBUG oslo_concurrency.lockutils [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Acquired lock "refresh_cache-510189aa-204c-4fd6-90d5-47a7ce5f7630" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.417729] env[62096]: DEBUG nova.network.neutron [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 763.417729] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Expecting reply to msg 33d48bde28244ed5b58b6148c589c3b4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 763.419076] env[62096]: DEBUG oslo_concurrency.lockutils [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 3.074s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 763.419624] env[62096]: ERROR nova.compute.manager [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 479cc0d6-65e1-4c95-a612-c8b87a7f1f27, please check neutron logs for more information. 
[ 763.419624] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] Traceback (most recent call last): [ 763.419624] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 763.419624] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] self.driver.spawn(context, instance, image_meta, [ 763.419624] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 763.419624] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] self._vmops.spawn(context, instance, image_meta, injected_files, [ 763.419624] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 763.419624] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] vm_ref = self.build_virtual_machine(instance, [ 763.419624] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 763.419624] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] vif_infos = vmwarevif.get_vif_info(self._session, [ 763.419624] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 763.419864] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] for vif in network_info: [ 763.419864] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 763.419864] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] return self._sync_wrapper(fn, *args, **kwargs) [ 763.419864] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 763.419864] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] self.wait() [ 763.419864] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 763.419864] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] self[:] = self._gt.wait() [ 763.419864] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 763.419864] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] return self._exit_event.wait() [ 763.419864] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 763.419864] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] result = hub.switch() [ 763.419864] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
763.419864] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] return self.greenlet.switch() [ 763.420136] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 763.420136] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] result = function(*args, **kwargs) [ 763.420136] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 763.420136] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] return func(*args, **kwargs) [ 763.420136] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 763.420136] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] raise e [ 763.420136] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 763.420136] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] nwinfo = self.network_api.allocate_for_instance( [ 763.420136] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 763.420136] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] created_port_ids = self._update_ports_for_instance( [ 763.420136] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 763.420136] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] with excutils.save_and_reraise_exception(): [ 763.420136] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 763.420451] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] self.force_reraise() [ 763.420451] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 763.420451] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] raise self.value [ 763.420451] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 763.420451] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] updated_port = self._update_port( [ 763.420451] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 763.420451] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] _ensure_no_port_binding_failure(port) [ 763.420451] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 763.420451] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] raise exception.PortBindingFailed(port_id=port['id']) [ 763.420451] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] nova.exception.PortBindingFailed: Binding failed for port 479cc0d6-65e1-4c95-a612-c8b87a7f1f27, please check neutron logs for more information. [ 763.420451] env[62096]: ERROR nova.compute.manager [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] [ 763.420677] env[62096]: DEBUG nova.compute.utils [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] Binding failed for port 479cc0d6-65e1-4c95-a612-c8b87a7f1f27, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 763.421688] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.465s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 763.424191] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Expecting reply to msg 7dd44dac5f2746e797412d0e4130313b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 763.425824] env[62096]: DEBUG nova.compute.manager [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] Build of instance 59b37648-7b23-4ae3-90e6-867fbbde25df was re-scheduled: Binding failed for port 479cc0d6-65e1-4c95-a612-c8b87a7f1f27, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 763.426261] env[62096]: DEBUG nova.compute.manager [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 763.427282] env[62096]: DEBUG oslo_concurrency.lockutils [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Acquiring lock "refresh_cache-59b37648-7b23-4ae3-90e6-867fbbde25df" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 763.427282] env[62096]: DEBUG oslo_concurrency.lockutils [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Acquired lock "refresh_cache-59b37648-7b23-4ae3-90e6-867fbbde25df" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.427282] env[62096]: DEBUG nova.network.neutron [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 763.427282] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Expecting reply to msg 716196ed66204df9be74e34428a83963 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 763.428501] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 33d48bde28244ed5b58b6148c589c3b4 [ 763.433176] env[62096]: DEBUG nova.compute.manager [req-a5a7b5c3-c264-4dc8-8094-dde2031834ef req-9a1cb171-870c-4b98-8827-40053371d1da service nova] [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] Received event network-changed-8f78fa6f-374c-4405-b1b5-d31f92a1df08 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 763.433370] env[62096]: DEBUG nova.compute.manager [req-a5a7b5c3-c264-4dc8-8094-dde2031834ef req-9a1cb171-870c-4b98-8827-40053371d1da service nova] [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] Refreshing instance network info cache due to event network-changed-8f78fa6f-374c-4405-b1b5-d31f92a1df08. 
{{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 763.433571] env[62096]: DEBUG oslo_concurrency.lockutils [req-a5a7b5c3-c264-4dc8-8094-dde2031834ef req-9a1cb171-870c-4b98-8827-40053371d1da service nova] Acquiring lock "refresh_cache-510189aa-204c-4fd6-90d5-47a7ce5f7630" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 763.434497] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 716196ed66204df9be74e34428a83963 [ 763.460570] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7dd44dac5f2746e797412d0e4130313b [ 763.937817] env[62096]: DEBUG nova.network.neutron [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 763.963259] env[62096]: DEBUG nova.network.neutron [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 764.125314] env[62096]: DEBUG nova.network.neutron [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.126045] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Expecting reply to msg baa3f5d1eacf4661ba6b28719b002f1c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 764.128272] env[62096]: DEBUG nova.network.neutron [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.128818] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Expecting reply to msg 74925f3c86034bf38a8a804cf365b6a2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 764.135339] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg baa3f5d1eacf4661ba6b28719b002f1c [ 764.140269] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 74925f3c86034bf38a8a804cf365b6a2 [ 764.313330] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d3a0701-a3ba-4cd3-8ed3-e5973dc4f8dd {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.321211] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-16db9336-5e88-45f2-aee8-67f29fe7189c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.352558] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfaf34ed-1379-4f76-a5f7-eddab1abed65 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.359998] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-add3e35b-eb80-4c02-89f8-7e0e25d16307 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.373318] env[62096]: DEBUG nova.compute.provider_tree [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 764.373827] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Expecting reply to msg 85cb5322e4c145cf93bb364b2cf30ec6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 764.381526] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 85cb5322e4c145cf93bb364b2cf30ec6 [ 764.629224] env[62096]: DEBUG oslo_concurrency.lockutils [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Releasing lock "refresh_cache-510189aa-204c-4fd6-90d5-47a7ce5f7630" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 764.629810] env[62096]: DEBUG nova.compute.manager [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 764.630091] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 764.630568] env[62096]: DEBUG oslo_concurrency.lockutils [req-a5a7b5c3-c264-4dc8-8094-dde2031834ef req-9a1cb171-870c-4b98-8827-40053371d1da service nova] Acquired lock "refresh_cache-510189aa-204c-4fd6-90d5-47a7ce5f7630" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.630795] env[62096]: DEBUG nova.network.neutron [req-a5a7b5c3-c264-4dc8-8094-dde2031834ef req-9a1cb171-870c-4b98-8827-40053371d1da service nova] [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] Refreshing network info cache for port 8f78fa6f-374c-4405-b1b5-d31f92a1df08 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 764.631362] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-a5a7b5c3-c264-4dc8-8094-dde2031834ef req-9a1cb171-870c-4b98-8827-40053371d1da service nova] Expecting reply to msg 108ad822d6d84d80afeedcc90c557a49 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 764.632758] env[62096]: DEBUG oslo_concurrency.lockutils [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Releasing lock "refresh_cache-59b37648-7b23-4ae3-90e6-867fbbde25df" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 764.633049] env[62096]: DEBUG nova.compute.manager [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 764.633327] env[62096]: DEBUG nova.compute.manager [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 764.633591] env[62096]: DEBUG nova.network.neutron [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 764.636550] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-83aa77a0-c389-433a-8567-ada19d0368e6 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.651643] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdbc6752-2c7a-4f87-869a-58460805c22e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.667234] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 108ad822d6d84d80afeedcc90c557a49 [ 764.668827] env[62096]: DEBUG nova.network.neutron [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 764.669884] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Expecting reply to msg fa1fe0a400f0425387c2cdd276bfa2d4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 764.677094] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fa1fe0a400f0425387c2cdd276bfa2d4 [ 764.686658] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 510189aa-204c-4fd6-90d5-47a7ce5f7630 could not be found. [ 764.687024] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 764.687334] env[62096]: INFO nova.compute.manager [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] Took 0.06 seconds to destroy the instance on the hypervisor. 
[ 764.687883] env[62096]: DEBUG oslo.service.loopingcall [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 764.688118] env[62096]: DEBUG nova.compute.manager [-] [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 764.688284] env[62096]: DEBUG nova.network.neutron [-] [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 764.720937] env[62096]: DEBUG nova.network.neutron [-] [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 764.720937] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 811e08a43f084da1b82b447dd3c93c80 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 764.727488] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 811e08a43f084da1b82b447dd3c93c80 [ 764.877346] env[62096]: DEBUG nova.scheduler.client.report [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 764.879815] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Expecting reply to msg 81be2d9abead4e198f4060964e39863b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 764.895731] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 81be2d9abead4e198f4060964e39863b [ 765.171707] env[62096]: DEBUG nova.network.neutron [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.172525] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Expecting reply to msg 8b91e3ef77d84806b4fbf31b069db5e4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 765.184217] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8b91e3ef77d84806b4fbf31b069db5e4 [ 765.190979] env[62096]: DEBUG 
nova.network.neutron [req-a5a7b5c3-c264-4dc8-8094-dde2031834ef req-9a1cb171-870c-4b98-8827-40053371d1da service nova] [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 765.230253] env[62096]: DEBUG nova.network.neutron [-] [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.230253] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg f72ee32bb97241218c11a00d944968d6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 765.248222] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f72ee32bb97241218c11a00d944968d6 [ 765.314447] env[62096]: DEBUG nova.network.neutron [req-a5a7b5c3-c264-4dc8-8094-dde2031834ef req-9a1cb171-870c-4b98-8827-40053371d1da service nova] [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.315461] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-a5a7b5c3-c264-4dc8-8094-dde2031834ef req-9a1cb171-870c-4b98-8827-40053371d1da service nova] Expecting reply to msg f02596923d2749c1bd42f65bda65f3db in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 765.324297] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f02596923d2749c1bd42f65bda65f3db [ 765.382727] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.961s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 765.383403] env[62096]: ERROR nova.compute.manager [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 64fe9acd-2f73-403b-b490-b6600bb258e3, please check neutron logs for more information. 
[ 765.383403] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] Traceback (most recent call last): [ 765.383403] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 765.383403] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] self.driver.spawn(context, instance, image_meta, [ 765.383403] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 765.383403] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 765.383403] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 765.383403] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] vm_ref = self.build_virtual_machine(instance, [ 765.383403] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 765.383403] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] vif_infos = vmwarevif.get_vif_info(self._session, [ 765.383403] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 765.383667] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] for vif in network_info: [ 765.383667] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 765.383667] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] return self._sync_wrapper(fn, *args, **kwargs) [ 765.383667] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 765.383667] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] self.wait() [ 765.383667] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 765.383667] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] self[:] = self._gt.wait() [ 765.383667] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 765.383667] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] return self._exit_event.wait() [ 765.383667] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 765.383667] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] result = hub.switch() [ 765.383667] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
765.383667] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] return self.greenlet.switch() [ 765.383952] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 765.383952] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] result = function(*args, **kwargs) [ 765.383952] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 765.383952] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] return func(*args, **kwargs) [ 765.383952] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 765.383952] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] raise e [ 765.383952] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 765.383952] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] nwinfo = self.network_api.allocate_for_instance( [ 765.383952] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 765.383952] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] created_port_ids = self._update_ports_for_instance( [ 765.383952] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 765.383952] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] with excutils.save_and_reraise_exception(): [ 765.383952] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 765.384293] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] self.force_reraise() [ 765.384293] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 765.384293] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] raise self.value [ 765.384293] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 765.384293] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] updated_port = self._update_port( [ 765.384293] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 765.384293] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] _ensure_no_port_binding_failure(port) [ 765.384293] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 765.384293] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] raise exception.PortBindingFailed(port_id=port['id']) [ 765.384293] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] nova.exception.PortBindingFailed: Binding failed for port 64fe9acd-2f73-403b-b490-b6600bb258e3, please check neutron logs for more information. [ 765.384293] env[62096]: ERROR nova.compute.manager [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] [ 765.384569] env[62096]: DEBUG nova.compute.utils [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] Binding failed for port 64fe9acd-2f73-403b-b490-b6600bb258e3, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 765.385558] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 15.464s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 765.385711] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 765.385857] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62096) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 765.386159] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.140s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 765.391651] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 1b63eaffe6d142f9b3344840ab11d57f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 765.393134] env[62096]: DEBUG nova.compute.manager [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] Build of instance 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c was re-scheduled: Binding failed for port 64fe9acd-2f73-403b-b490-b6600bb258e3, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 765.393539] env[62096]: DEBUG nova.compute.manager [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 765.393783] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Acquiring lock "refresh_cache-0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 765.393930] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Acquired lock "refresh_cache-0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.394147] env[62096]: DEBUG nova.network.neutron [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 765.394532] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Expecting reply to msg ac4c0e38fe004f67a60754c052bc44dc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 765.398395] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2f58454-b4e6-4ed0-add6-0b69b2eebf97 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.403651] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac4c0e38fe004f67a60754c052bc44dc [ 765.411085] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33a3def5-45f2-45d6-967f-3b6d090c4d6f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.432425] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f5df71a-f29a-4c83-bf25-d724962851d0 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.436288] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1b63eaffe6d142f9b3344840ab11d57f [ 765.441895] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9570031a-5d1f-496a-8390-f345e99720c2 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.473250] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 
free_ram=181768MB free_disk=127GB free_vcpus=48 pci_devices=None {{(pid=62096) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 765.473432] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 765.511233] env[62096]: DEBUG nova.compute.manager [req-ae27cea2-17f9-4811-9f4c-9c2450cea338 req-4a06081b-b92e-4ef4-bdbc-96f4ed0ddbf1 service nova] [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] Received event network-vif-deleted-8f78fa6f-374c-4405-b1b5-d31f92a1df08 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 765.675653] env[62096]: INFO nova.compute.manager [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] [instance: 59b37648-7b23-4ae3-90e6-867fbbde25df] Took 1.04 seconds to deallocate network for instance. [ 765.677566] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Expecting reply to msg f720f867b60640e1806043ac07f8e3b6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 765.713044] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f720f867b60640e1806043ac07f8e3b6 [ 765.726151] env[62096]: INFO nova.compute.manager [-] [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] Took 1.04 seconds to deallocate network for instance. [ 765.728935] env[62096]: DEBUG nova.compute.claims [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 765.729229] env[62096]: DEBUG oslo_concurrency.lockutils [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 765.818444] env[62096]: DEBUG oslo_concurrency.lockutils [req-a5a7b5c3-c264-4dc8-8094-dde2031834ef req-9a1cb171-870c-4b98-8827-40053371d1da service nova] Releasing lock "refresh_cache-510189aa-204c-4fd6-90d5-47a7ce5f7630" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 765.922158] env[62096]: DEBUG nova.network.neutron [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 766.043388] env[62096]: DEBUG nova.network.neutron [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.043889] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Expecting reply to msg d3d153ae2ef2420895a717acbc4f1b48 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 766.052180] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d3d153ae2ef2420895a717acbc4f1b48 [ 766.185378] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Expecting reply to msg 5f44df403e3d4a6e9cb61a51a2487049 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 766.222168] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5f44df403e3d4a6e9cb61a51a2487049 [ 766.273273] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd5ded4f-45ac-4385-a2b4-d9a442eb8b1f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.281054] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d39e312-efe8-4977-9dff-922df51a2aa7 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.313479] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dc88a7e-9fae-478f-b23a-b738014e15a6 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.329717] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1d42ec1-45bb-4a43-8c9c-9c6c1f1ed73e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.344671] env[62096]: DEBUG nova.compute.provider_tree [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 766.345387] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg ed71b0cc69854f44936fa5b2b3070e85 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 766.353499] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ed71b0cc69854f44936fa5b2b3070e85 [ 766.545686] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Releasing 
lock "refresh_cache-0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 766.545909] env[62096]: DEBUG nova.compute.manager [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 766.546092] env[62096]: DEBUG nova.compute.manager [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 766.546250] env[62096]: DEBUG nova.network.neutron [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 766.567192] env[62096]: DEBUG nova.network.neutron [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 766.567744] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Expecting reply to msg 84df98d2f29c4df1aed349f6ef7612cc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 766.575067] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 84df98d2f29c4df1aed349f6ef7612cc [ 766.708750] env[62096]: INFO nova.scheduler.client.report [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Deleted allocations for instance 59b37648-7b23-4ae3-90e6-867fbbde25df [ 766.715247] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Expecting reply to msg 6577cb1a57504ff58240520c402c068e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 766.741153] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6577cb1a57504ff58240520c402c068e [ 766.848357] env[62096]: DEBUG nova.scheduler.client.report [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 766.850925] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 14511f070e6e456b8fe69bf4859802e9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 766.862513] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 14511f070e6e456b8fe69bf4859802e9 [ 767.069879] env[62096]: DEBUG nova.network.neutron [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 767.070444] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Expecting reply to msg b2daf4d2875a4ac68a0ed5cd27a1e1ea in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 767.078863] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b2daf4d2875a4ac68a0ed5cd27a1e1ea [ 767.217757] env[62096]: DEBUG oslo_concurrency.lockutils [None req-fce0ab80-047e-4dc5-9e51-67218a6b1057 tempest-AttachInterfacesUnderV243Test-218968522 tempest-AttachInterfacesUnderV243Test-218968522-project-member] Lock "59b37648-7b23-4ae3-90e6-867fbbde25df" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 153.021s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 767.218335] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Expecting reply to msg c5fc6a7502a5454c8e9411aec19ce284 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 767.240593] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c5fc6a7502a5454c8e9411aec19ce284 [ 767.360289] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.968s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 767.360289] env[62096]: ERROR nova.compute.manager [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 4055f37b-0c31-4752-8dba-88f3373f6782, please check neutron logs for more information. 
[ 767.360289] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] Traceback (most recent call last): [ 767.360289] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 767.360289] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] self.driver.spawn(context, instance, image_meta, [ 767.360289] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 767.360289] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] self._vmops.spawn(context, instance, image_meta, injected_files, [ 767.360289] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 767.360289] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] vm_ref = self.build_virtual_machine(instance, [ 767.360543] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 767.360543] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] vif_infos = vmwarevif.get_vif_info(self._session, [ 767.360543] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 767.360543] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] for vif in network_info: [ 767.360543] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 767.360543] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] return self._sync_wrapper(fn, *args, **kwargs) [ 767.360543] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 767.360543] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] self.wait() [ 767.360543] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 767.360543] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] self[:] = self._gt.wait() [ 767.360543] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 767.360543] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] return self._exit_event.wait() [ 767.360543] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 767.360880] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] result = hub.switch() [ 767.360880] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
767.360880] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] return self.greenlet.switch() [ 767.360880] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 767.360880] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] result = function(*args, **kwargs) [ 767.360880] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 767.360880] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] return func(*args, **kwargs) [ 767.360880] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 767.360880] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] raise e [ 767.360880] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 767.360880] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] nwinfo = self.network_api.allocate_for_instance( [ 767.360880] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 767.360880] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] created_port_ids = self._update_ports_for_instance( [ 767.361211] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 767.361211] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] with excutils.save_and_reraise_exception(): [ 767.361211] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 767.361211] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] self.force_reraise() [ 767.361211] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 767.361211] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] raise self.value [ 767.361211] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 767.361211] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] updated_port = self._update_port( [ 767.361211] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 767.361211] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] _ensure_no_port_binding_failure(port) [ 767.361211] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 767.361211] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] raise exception.PortBindingFailed(port_id=port['id']) [ 767.361518] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] nova.exception.PortBindingFailed: Binding failed for port 4055f37b-0c31-4752-8dba-88f3373f6782, please check neutron logs for more information. [ 767.361518] env[62096]: ERROR nova.compute.manager [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] [ 767.361518] env[62096]: DEBUG nova.compute.utils [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] Binding failed for port 4055f37b-0c31-4752-8dba-88f3373f6782, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 767.361518] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.194s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 767.361518] env[62096]: INFO nova.compute.claims [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 767.362864] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 94d5f40ca32f444ba0a6d9809dfd318f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 767.365176] env[62096]: DEBUG nova.compute.manager [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] Build of instance dafff089-f6e4-4269-bf0e-ea305c11ff36 was re-scheduled: Binding failed for port 4055f37b-0c31-4752-8dba-88f3373f6782, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 767.366607] env[62096]: DEBUG nova.compute.manager [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 767.367066] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Acquiring lock "refresh_cache-dafff089-f6e4-4269-bf0e-ea305c11ff36" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 767.367364] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Acquired lock "refresh_cache-dafff089-f6e4-4269-bf0e-ea305c11ff36" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.367672] env[62096]: DEBUG nova.network.neutron [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 767.368270] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg fbf5e1cfe16740b3921207cc37a3d7aa in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 767.374485] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fbf5e1cfe16740b3921207cc37a3d7aa [ 767.407974] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 94d5f40ca32f444ba0a6d9809dfd318f [ 767.573368] env[62096]: INFO nova.compute.manager [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] [instance: 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c] Took 1.03 seconds to deallocate network for instance. [ 767.575502] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Expecting reply to msg 016c48cf42c748e5b8d0324224287d62 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 767.611077] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 016c48cf42c748e5b8d0324224287d62 [ 767.724041] env[62096]: DEBUG nova.compute.manager [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f050f0de-dc84-4825-b490-eafe522354cd] Starting instance... 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 767.724041] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Expecting reply to msg 305fbf9c4d34466a9f2de4fb7ca46573 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 767.766862] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 305fbf9c4d34466a9f2de4fb7ca46573 [ 767.868397] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg ec8e20ced5b149f2b5e7110ab9c9e03e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 767.878565] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ec8e20ced5b149f2b5e7110ab9c9e03e [ 767.886997] env[62096]: DEBUG nova.network.neutron [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 768.016419] env[62096]: DEBUG nova.network.neutron [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 768.016419] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 471fec0c133d448da0b99aa37719d626 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 768.028019] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 471fec0c133d448da0b99aa37719d626 [ 768.080800] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Expecting reply to msg eaf21da642ff401eb3601eefa828bac4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 768.128358] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eaf21da642ff401eb3601eefa828bac4 [ 768.251739] env[62096]: DEBUG oslo_concurrency.lockutils [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 768.518054] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Releasing lock "refresh_cache-dafff089-f6e4-4269-bf0e-ea305c11ff36" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 768.518320] env[62096]: DEBUG nova.compute.manager [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 
tempest-ServersTestJSON-796182065-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 768.518503] env[62096]: DEBUG nova.compute.manager [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 768.518662] env[62096]: DEBUG nova.network.neutron [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 768.550349] env[62096]: DEBUG nova.network.neutron [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 768.550906] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg d103b90d09e04bf3a40362f70b0de417 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 768.586386] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d103b90d09e04bf3a40362f70b0de417 [ 768.612699] env[62096]: INFO nova.scheduler.client.report [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Deleted allocations for instance 0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c [ 768.620522] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Expecting reply to msg e0a8a3ccef214e208a608bb8cf6d0fe5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 768.633228] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e0a8a3ccef214e208a608bb8cf6d0fe5 [ 768.815995] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7666163-543f-4a1c-9e35-94faa74e687f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.824092] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4da61665-c79d-4ee0-b089-b02315e782bd {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.862327] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a71298e-057d-4c35-a676-09952776b687 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.870107] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19c80c35-49ca-4602-9e8d-2a34da764cf9 {{(pid=62096) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.884422] env[62096]: DEBUG nova.compute.provider_tree [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 768.884896] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg f4f6d91cca394644a876b0b0282c5c62 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 768.892870] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f4f6d91cca394644a876b0b0282c5c62 [ 769.052696] env[62096]: DEBUG nova.network.neutron [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 769.053239] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 861664a848c84f348141f5ae4b13bcb4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 769.066203] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 861664a848c84f348141f5ae4b13bcb4 [ 769.122404] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d5f614b0-b494-4335-b00e-6b8de6a76cbe tempest-ServersWithSpecificFlavorTestJSON-68500714 tempest-ServersWithSpecificFlavorTestJSON-68500714-project-member] Lock "0b1a8f0b-70d8-4d30-a857-09a6ebe1f95c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 150.883s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 769.122999] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Expecting reply to msg 4c9113da72a740ae97ad0ced55e7eb02 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 769.145550] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4c9113da72a740ae97ad0ced55e7eb02 [ 769.388102] env[62096]: DEBUG nova.scheduler.client.report [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 769.390615] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade 
tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 1f7fd570e69e4b429c44fb252eb92555 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 769.402118] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1f7fd570e69e4b429c44fb252eb92555 [ 769.555935] env[62096]: INFO nova.compute.manager [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: dafff089-f6e4-4269-bf0e-ea305c11ff36] Took 1.04 seconds to deallocate network for instance. [ 769.557865] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 3b0f08c3ab1440a492169489c79e0d58 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 769.599042] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3b0f08c3ab1440a492169489c79e0d58 [ 769.625774] env[62096]: DEBUG nova.compute.manager [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 769.627740] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Expecting reply to msg d209345d70b240088565f0aab8309b62 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 769.660992] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d209345d70b240088565f0aab8309b62 [ 769.892974] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.535s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 769.893525] env[62096]: DEBUG nova.compute.manager [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 769.895320] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 418fd5f8ef9b47958fceb796cb3de45d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 769.896426] env[62096]: DEBUG oslo_concurrency.lockutils [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.095s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 769.906761] env[62096]: INFO nova.compute.claims [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 769.906761] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Expecting reply to msg b245ddc4fbb949bc9d96d79da8199fbc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 769.937983] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 418fd5f8ef9b47958fceb796cb3de45d [ 769.939247] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b245ddc4fbb949bc9d96d79da8199fbc [ 770.072833] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 26b33b9fe9d349248eacd33803b6903c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 770.111322] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 26b33b9fe9d349248eacd33803b6903c [ 770.150713] env[62096]: DEBUG oslo_concurrency.lockutils [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 770.404229] env[62096]: DEBUG nova.compute.utils [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 770.404951] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 0aff03a3da4b44c5a2c6e857f23d8645 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 770.406926] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Expecting reply to msg a75ce86cec3f474e9fe6bc5c3e7e92de in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 770.407768] env[62096]: DEBUG 
nova.compute.manager [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 770.407891] env[62096]: DEBUG nova.network.neutron [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 770.417545] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a75ce86cec3f474e9fe6bc5c3e7e92de [ 770.424090] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0aff03a3da4b44c5a2c6e857f23d8645 [ 770.449053] env[62096]: DEBUG nova.policy [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6d00589b1aa24dd7beb7c3ac5cb2a8ca', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bffafc9dc83d477d823cd7364968f48a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 770.600048] env[62096]: INFO nova.scheduler.client.report [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Deleted allocations for instance dafff089-f6e4-4269-bf0e-ea305c11ff36 [ 770.604874] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 0ea2164a8bfe45b982c05908031eea68 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 770.625926] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0ea2164a8bfe45b982c05908031eea68 [ 770.916925] env[62096]: DEBUG nova.compute.manager [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 770.916925] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg a97c4e9f9535400e993fb0c0da54c8e4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 770.967949] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a97c4e9f9535400e993fb0c0da54c8e4 [ 771.113361] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a94ae5da-1291-4194-a0a7-fa8d95eb4821 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Lock "dafff089-f6e4-4269-bf0e-ea305c11ff36" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 148.350s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 771.113945] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Expecting reply to msg f4969ed1f745463f994eecb5d7a276a3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 771.126669] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f4969ed1f745463f994eecb5d7a276a3 [ 771.188123] env[62096]: DEBUG nova.network.neutron [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] Successfully created port: f4e2ec88-4a20-4948-93f1-f9ff4ba04d70 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 771.280944] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f85e74c-018e-498f-9b24-d60843313271 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.288303] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c73dd104-2f16-42b7-a0b8-010c04187bdc {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.329948] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-532ca4dc-95fd-43cb-b233-eaf93783047a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.337307] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc400c87-dbf3-4ae6-8bed-9f48a3168e02 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.357882] env[62096]: DEBUG nova.compute.provider_tree [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 771.358459] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Expecting reply to msg 
186f006caae84eb8b8bd35a345591951 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 771.365046] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 186f006caae84eb8b8bd35a345591951 [ 771.428884] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 7d7c565fefce4967aac8ce6296e3195d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 771.462118] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7d7c565fefce4967aac8ce6296e3195d [ 771.618141] env[62096]: DEBUG nova.compute.manager [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 771.619845] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Expecting reply to msg 70e95c7bf66c479ea248bddef27c0a20 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 771.653607] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 70e95c7bf66c479ea248bddef27c0a20 [ 771.802565] env[62096]: DEBUG oslo_concurrency.lockutils [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Acquiring lock "5c04dded-d1c9-44fe-bf2f-de295d21b725" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 771.802799] env[62096]: DEBUG oslo_concurrency.lockutils [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Lock "5c04dded-d1c9-44fe-bf2f-de295d21b725" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 771.860877] env[62096]: DEBUG nova.scheduler.client.report [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 771.863299] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Expecting reply to msg 5ca9a896692b40829175a6da28b7cb30 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 771.881660] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response 
for msg 5ca9a896692b40829175a6da28b7cb30 [ 771.931562] env[62096]: DEBUG nova.compute.manager [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] Start spawning the instance on the hypervisor. {{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 771.970198] env[62096]: DEBUG nova.virt.hardware [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 771.970452] env[62096]: DEBUG nova.virt.hardware [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 771.970613] env[62096]: DEBUG nova.virt.hardware [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 771.970804] env[62096]: DEBUG nova.virt.hardware [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 771.970947] env[62096]: DEBUG nova.virt.hardware [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 771.971091] env[62096]: DEBUG nova.virt.hardware [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 771.971308] env[62096]: DEBUG nova.virt.hardware [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 771.971482] env[62096]: 
DEBUG nova.virt.hardware [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 771.971639] env[62096]: DEBUG nova.virt.hardware [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 771.971953] env[62096]: DEBUG nova.virt.hardware [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 771.972168] env[62096]: DEBUG nova.virt.hardware [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 771.973224] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf4bfeed-c364-4819-bf7e-b897a78ba549 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.981928] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-897f70ee-7b99-4d2e-9740-d2a31500537f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.149074] env[62096]: DEBUG oslo_concurrency.lockutils [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 772.366654] env[62096]: DEBUG oslo_concurrency.lockutils [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.470s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 772.367160] env[62096]: DEBUG nova.compute.manager [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 772.368863] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Expecting reply to msg 345fbe14004c4db18e9a4fe40c911984 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 772.369848] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.044s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 772.371528] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Expecting reply to msg a697ff1bf56a42919fa78b3f2b5b8239 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 772.412342] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a697ff1bf56a42919fa78b3f2b5b8239 [ 772.415328] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 345fbe14004c4db18e9a4fe40c911984 [ 772.475826] env[62096]: ERROR nova.compute.manager [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f4e2ec88-4a20-4948-93f1-f9ff4ba04d70, please check neutron logs for more information. 
[ 772.475826] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 772.475826] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 772.475826] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 772.475826] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 772.475826] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 772.475826] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 772.475826] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 772.475826] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 772.475826] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 772.475826] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 772.475826] env[62096]: ERROR nova.compute.manager raise self.value [ 772.475826] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 772.475826] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 772.475826] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 772.475826] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 772.476517] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 772.476517] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 772.476517] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f4e2ec88-4a20-4948-93f1-f9ff4ba04d70, please check neutron logs for more information. 
[ 772.476517] env[62096]: ERROR nova.compute.manager [ 772.476517] env[62096]: Traceback (most recent call last): [ 772.476517] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 772.476517] env[62096]: listener.cb(fileno) [ 772.476517] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 772.476517] env[62096]: result = function(*args, **kwargs) [ 772.476517] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 772.476517] env[62096]: return func(*args, **kwargs) [ 772.476517] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 772.476517] env[62096]: raise e [ 772.476517] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 772.476517] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 772.476517] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 772.476517] env[62096]: created_port_ids = self._update_ports_for_instance( [ 772.476517] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 772.476517] env[62096]: with excutils.save_and_reraise_exception(): [ 772.476517] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 772.476517] env[62096]: self.force_reraise() [ 772.476517] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 772.476517] env[62096]: raise self.value [ 772.476517] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 772.476517] env[62096]: updated_port = self._update_port( [ 772.476517] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 772.476517] env[62096]: _ensure_no_port_binding_failure(port) [ 772.476517] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 772.476517] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 772.478050] env[62096]: nova.exception.PortBindingFailed: Binding failed for port f4e2ec88-4a20-4948-93f1-f9ff4ba04d70, please check neutron logs for more information. [ 772.478050] env[62096]: Removing descriptor: 19 [ 772.478050] env[62096]: ERROR nova.compute.manager [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f4e2ec88-4a20-4948-93f1-f9ff4ba04d70, please check neutron logs for more information. 
[ 772.478050] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] Traceback (most recent call last): [ 772.478050] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 772.478050] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] yield resources [ 772.478050] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 772.478050] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] self.driver.spawn(context, instance, image_meta, [ 772.478050] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 772.478050] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 772.478050] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 772.478050] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] vm_ref = self.build_virtual_machine(instance, [ 772.478411] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 772.478411] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] vif_infos = vmwarevif.get_vif_info(self._session, [ 772.478411] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 772.478411] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] for vif in network_info: [ 772.478411] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 772.478411] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] return self._sync_wrapper(fn, *args, **kwargs) [ 772.478411] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 772.478411] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] self.wait() [ 772.478411] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 772.478411] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] self[:] = self._gt.wait() [ 772.478411] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 772.478411] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] return self._exit_event.wait() [ 772.478411] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 772.478747] env[62096]: ERROR 
nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] result = hub.switch() [ 772.478747] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 772.478747] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] return self.greenlet.switch() [ 772.478747] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 772.478747] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] result = function(*args, **kwargs) [ 772.478747] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 772.478747] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] return func(*args, **kwargs) [ 772.478747] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 772.478747] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] raise e [ 772.478747] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 772.478747] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] nwinfo = self.network_api.allocate_for_instance( [ 772.478747] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 772.478747] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] created_port_ids = self._update_ports_for_instance( [ 772.479108] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 772.479108] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] with excutils.save_and_reraise_exception(): [ 772.479108] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 772.479108] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] self.force_reraise() [ 772.479108] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 772.479108] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] raise self.value [ 772.479108] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 772.479108] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] updated_port = self._update_port( [ 772.479108] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 772.479108] 
env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] _ensure_no_port_binding_failure(port) [ 772.479108] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 772.479108] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] raise exception.PortBindingFailed(port_id=port['id']) [ 772.479437] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] nova.exception.PortBindingFailed: Binding failed for port f4e2ec88-4a20-4948-93f1-f9ff4ba04d70, please check neutron logs for more information. [ 772.479437] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] [ 772.479437] env[62096]: INFO nova.compute.manager [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] Terminating instance [ 772.479437] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Acquiring lock "refresh_cache-de88f8a1-20a5-49f9-adcb-de48aeaa548a" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 772.479437] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Acquired lock "refresh_cache-de88f8a1-20a5-49f9-adcb-de48aeaa548a" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 772.479437] env[62096]: DEBUG nova.network.neutron [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 772.479821] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg d41e98cdf5ba43159b59a3fb306fef3e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 772.486302] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d41e98cdf5ba43159b59a3fb306fef3e [ 772.510541] env[62096]: DEBUG nova.compute.manager [req-952da3fc-c654-4dac-b897-02687c379cf8 req-5ead6a69-317b-4abb-ada6-2e52903588e9 service nova] [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] Received event network-changed-f4e2ec88-4a20-4948-93f1-f9ff4ba04d70 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 772.510774] env[62096]: DEBUG nova.compute.manager [req-952da3fc-c654-4dac-b897-02687c379cf8 req-5ead6a69-317b-4abb-ada6-2e52903588e9 service nova] [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] Refreshing instance network info cache due to event network-changed-f4e2ec88-4a20-4948-93f1-f9ff4ba04d70. 
{{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 772.511004] env[62096]: DEBUG oslo_concurrency.lockutils [req-952da3fc-c654-4dac-b897-02687c379cf8 req-5ead6a69-317b-4abb-ada6-2e52903588e9 service nova] Acquiring lock "refresh_cache-de88f8a1-20a5-49f9-adcb-de48aeaa548a" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 772.874440] env[62096]: DEBUG nova.compute.utils [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 772.875975] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Expecting reply to msg cde9031463944206b50f008d0af335ee in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 772.880088] env[62096]: DEBUG nova.compute.manager [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 772.880088] env[62096]: DEBUG nova.network.neutron [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 772.886386] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cde9031463944206b50f008d0af335ee [ 772.952212] env[62096]: DEBUG nova.policy [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0e9021ac7d5042ff86204c90e598c1e6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bb654bf2783f482a87d83d4c7ad7be5b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 773.012286] env[62096]: DEBUG nova.network.neutron [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 773.209224] env[62096]: DEBUG nova.network.neutron [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 773.209759] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg f7e8597455fd4e63b61bf40916fee86a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 773.219092] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f7e8597455fd4e63b61bf40916fee86a [ 773.257291] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1576025-5b18-400d-896a-d77b616bed05 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.268633] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eb52398-0027-4e1f-b512-8713faa76d63 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.320862] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c39cbf86-a5cc-4b6e-b976-e843c57755f9 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.328674] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e798aa8f-8d43-4a3e-b5e6-ac92d3584e22 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.342496] env[62096]: DEBUG nova.compute.provider_tree [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 773.343013] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Expecting reply to msg 054a959a808845e49bc79410284cd3a1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 773.349943] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 054a959a808845e49bc79410284cd3a1 [ 773.380224] env[62096]: DEBUG nova.compute.manager [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 773.382615] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Expecting reply to msg 0ce90e6eb3f94dd0a11d2d7b223de268 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 773.417379] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0ce90e6eb3f94dd0a11d2d7b223de268 [ 773.681116] env[62096]: DEBUG nova.network.neutron [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] Successfully created port: ddbe541a-41cd-49f6-8467-7f80404aec07 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 773.711778] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Releasing lock "refresh_cache-de88f8a1-20a5-49f9-adcb-de48aeaa548a" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 773.712229] env[62096]: DEBUG nova.compute.manager [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 773.712425] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 773.712738] env[62096]: DEBUG oslo_concurrency.lockutils [req-952da3fc-c654-4dac-b897-02687c379cf8 req-5ead6a69-317b-4abb-ada6-2e52903588e9 service nova] Acquired lock "refresh_cache-de88f8a1-20a5-49f9-adcb-de48aeaa548a" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 773.713008] env[62096]: DEBUG nova.network.neutron [req-952da3fc-c654-4dac-b897-02687c379cf8 req-5ead6a69-317b-4abb-ada6-2e52903588e9 service nova] [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] Refreshing network info cache for port f4e2ec88-4a20-4948-93f1-f9ff4ba04d70 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 773.713452] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-952da3fc-c654-4dac-b897-02687c379cf8 req-5ead6a69-317b-4abb-ada6-2e52903588e9 service nova] Expecting reply to msg 8af68c573bfd4f518f67b90db5ba9369 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 773.714330] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5abe711a-3e52-436e-a23e-828e29e942f0 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.721205] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8af68c573bfd4f518f67b90db5ba9369 [ 773.724993] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed13b6b4-c505-4b2c-a809-b700ef181388 {{(pid=62096) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.757716] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance de88f8a1-20a5-49f9-adcb-de48aeaa548a could not be found. [ 773.758003] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 773.758186] env[62096]: INFO nova.compute.manager [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] Took 0.05 seconds to destroy the instance on the hypervisor. [ 773.758425] env[62096]: DEBUG oslo.service.loopingcall [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 773.758632] env[62096]: DEBUG nova.compute.manager [-] [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 773.758720] env[62096]: DEBUG nova.network.neutron [-] [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 773.782484] env[62096]: DEBUG nova.network.neutron [-] [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 773.782992] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 89a825ccb1254703bf142351ec9c9990 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 773.791718] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 89a825ccb1254703bf142351ec9c9990 [ 773.848050] env[62096]: DEBUG nova.scheduler.client.report [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 773.848966] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Expecting reply to msg dae897f1aca74a43855083a5899724ad in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 773.863373] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dae897f1aca74a43855083a5899724ad [ 773.886986] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Expecting reply to msg 172ef03b7e1f4e02bcfa41e5565e6bfd in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 773.935812] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 172ef03b7e1f4e02bcfa41e5565e6bfd [ 774.252134] env[62096]: DEBUG nova.network.neutron [req-952da3fc-c654-4dac-b897-02687c379cf8 req-5ead6a69-317b-4abb-ada6-2e52903588e9 service nova] [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 774.284886] env[62096]: DEBUG nova.network.neutron [-] [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 774.285738] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 1bf15dffe4484692bff35bc49a180eed in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 774.296402] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1bf15dffe4484692bff35bc49a180eed [ 774.352528] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.982s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 774.352710] env[62096]: ERROR nova.compute.manager [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f277d93e-a15e-4bae-a5d0-cba98224d99e, please check neutron logs for more information. [ 774.352710] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] Traceback (most recent call last): [ 774.352710] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 774.352710] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] self.driver.spawn(context, instance, image_meta, [ 774.352710] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 774.352710] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 774.352710] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 774.352710] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] vm_ref = self.build_virtual_machine(instance, [ 774.352710] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 774.352710] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] vif_infos = vmwarevif.get_vif_info(self._session, [ 774.352710] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 774.352992] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] for vif in network_info: [ 774.352992] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 774.352992] env[62096]: ERROR nova.compute.manager [instance: 
7bfac3e2-f06c-4690-9215-a5f67a67c5bd] return self._sync_wrapper(fn, *args, **kwargs) [ 774.352992] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 774.352992] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] self.wait() [ 774.352992] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 774.352992] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] self[:] = self._gt.wait() [ 774.352992] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 774.352992] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] return self._exit_event.wait() [ 774.352992] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 774.352992] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] result = hub.switch() [ 774.352992] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 774.352992] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] return self.greenlet.switch() [ 774.353381] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 774.353381] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] result = function(*args, **kwargs) [ 774.353381] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 774.353381] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] return func(*args, **kwargs) [ 774.353381] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 774.353381] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] raise e [ 774.353381] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 774.353381] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] nwinfo = self.network_api.allocate_for_instance( [ 774.353381] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 774.353381] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] created_port_ids = self._update_ports_for_instance( [ 774.353381] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 774.353381] env[62096]: ERROR nova.compute.manager [instance: 
7bfac3e2-f06c-4690-9215-a5f67a67c5bd] with excutils.save_and_reraise_exception(): [ 774.353381] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 774.353672] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] self.force_reraise() [ 774.353672] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 774.353672] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] raise self.value [ 774.353672] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 774.353672] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] updated_port = self._update_port( [ 774.353672] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 774.353672] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] _ensure_no_port_binding_failure(port) [ 774.353672] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 774.353672] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] raise exception.PortBindingFailed(port_id=port['id']) [ 774.353672] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] nova.exception.PortBindingFailed: Binding failed for port f277d93e-a15e-4bae-a5d0-cba98224d99e, please check neutron logs for more information. [ 774.353672] env[62096]: ERROR nova.compute.manager [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] [ 774.353986] env[62096]: DEBUG nova.compute.utils [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] Binding failed for port f277d93e-a15e-4bae-a5d0-cba98224d99e, please check neutron logs for more information. 
{{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 774.354894] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.651s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 774.357355] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Expecting reply to msg ebde65aec39b4067bffd04cc0ca1a2ae in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 774.358967] env[62096]: DEBUG nova.compute.manager [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] Build of instance 7bfac3e2-f06c-4690-9215-a5f67a67c5bd was re-scheduled: Binding failed for port f277d93e-a15e-4bae-a5d0-cba98224d99e, please check neutron logs for more information. {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 774.359394] env[62096]: DEBUG nova.compute.manager [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 774.359611] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Acquiring lock "refresh_cache-7bfac3e2-f06c-4690-9215-a5f67a67c5bd" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 774.359750] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Acquired lock "refresh_cache-7bfac3e2-f06c-4690-9215-a5f67a67c5bd" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.359899] env[62096]: DEBUG nova.network.neutron [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 774.360669] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Expecting reply to msg 739c94a5009e42d5b5d9ef97b86627bb in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 774.369482] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 739c94a5009e42d5b5d9ef97b86627bb [ 774.371101] env[62096]: DEBUG nova.network.neutron [req-952da3fc-c654-4dac-b897-02687c379cf8 req-5ead6a69-317b-4abb-ada6-2e52903588e9 service nova] [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] Updating instance_info_cache with network_info: [] 
{{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 774.372112] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-952da3fc-c654-4dac-b897-02687c379cf8 req-5ead6a69-317b-4abb-ada6-2e52903588e9 service nova] Expecting reply to msg 8824fab309cc4c7c84755029aca10cba in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 774.390459] env[62096]: DEBUG nova.compute.manager [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] Start spawning the instance on the hypervisor. {{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 774.414852] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8824fab309cc4c7c84755029aca10cba [ 774.417789] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ebde65aec39b4067bffd04cc0ca1a2ae [ 774.421982] env[62096]: DEBUG nova.virt.hardware [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 774.422233] env[62096]: DEBUG nova.virt.hardware [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 774.422373] env[62096]: DEBUG nova.virt.hardware [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 774.422664] env[62096]: DEBUG nova.virt.hardware [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 774.422746] env[62096]: DEBUG nova.virt.hardware [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 774.422850] env[62096]: DEBUG nova.virt.hardware [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Chose sockets=0, cores=0, threads=0; limits were 
sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 774.423064] env[62096]: DEBUG nova.virt.hardware [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 774.423233] env[62096]: DEBUG nova.virt.hardware [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 774.423499] env[62096]: DEBUG nova.virt.hardware [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 774.423572] env[62096]: DEBUG nova.virt.hardware [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 774.423910] env[62096]: DEBUG nova.virt.hardware [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 774.424866] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0925104-4305-4dbb-afc2-e4657be34089 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.432896] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b476191-ad69-4691-bc9f-d2a20ad9e692 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.480029] env[62096]: DEBUG oslo_concurrency.lockutils [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Acquiring lock "aa3e06b6-89f8-425f-8950-30deb025a473" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 774.480300] env[62096]: DEBUG oslo_concurrency.lockutils [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Lock "aa3e06b6-89f8-425f-8950-30deb025a473" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 774.609225] env[62096]: DEBUG nova.compute.manager [req-adb0ff06-990a-42ab-bb68-a7c6fe91ab10 req-039d7658-35b5-4e63-877d-e4dddebe4fb9 service nova] [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] 
Received event network-vif-deleted-f4e2ec88-4a20-4948-93f1-f9ff4ba04d70 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 774.787742] env[62096]: INFO nova.compute.manager [-] [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] Took 1.03 seconds to deallocate network for instance. [ 774.790134] env[62096]: DEBUG nova.compute.claims [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 774.790320] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 774.875474] env[62096]: DEBUG oslo_concurrency.lockutils [req-952da3fc-c654-4dac-b897-02687c379cf8 req-5ead6a69-317b-4abb-ada6-2e52903588e9 service nova] Releasing lock "refresh_cache-de88f8a1-20a5-49f9-adcb-de48aeaa548a" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 774.901379] env[62096]: DEBUG nova.network.neutron [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 774.962098] env[62096]: ERROR nova.compute.manager [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ddbe541a-41cd-49f6-8467-7f80404aec07, please check neutron logs for more information. 
[ 774.962098] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 774.962098] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 774.962098] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 774.962098] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 774.962098] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 774.962098] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 774.962098] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 774.962098] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 774.962098] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 774.962098] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 774.962098] env[62096]: ERROR nova.compute.manager raise self.value [ 774.962098] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 774.962098] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 774.962098] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 774.962098] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 774.962603] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 774.962603] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 774.962603] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port ddbe541a-41cd-49f6-8467-7f80404aec07, please check neutron logs for more information. 
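Editor's note: the traceback above ends in nova.network.neutron._update_port calling _ensure_no_port_binding_failure(port), which raises PortBindingFailed(port_id=port['id']). Below is a minimal, self-contained sketch of that kind of check; the binding:vif_type == 'binding_failed' trigger and the helper names are illustrative assumptions, not the actual Nova source.

# Hypothetical, self-contained sketch -- not the real nova.network.neutron module.
class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed."""
    def __init__(self, port_id):
        super().__init__(f"Binding failed for port {port_id}, "
                         "please check neutron logs for more information.")
        self.port_id = port_id

# Assumption: Neutron marks a port it could not bind with this vif_type value.
VIF_TYPE_BINDING_FAILED = "binding_failed"

def ensure_no_port_binding_failure(port):
    """Raise if the Neutron port dict reports a failed binding."""
    if port.get("binding:vif_type") == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port["id"])

if __name__ == "__main__":
    # Port shaped like a Neutron API response, reusing the port ID from the log.
    port = {"id": "ddbe541a-41cd-49f6-8467-7f80404aec07",
            "binding:vif_type": "binding_failed"}
    try:
        ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)

Run standalone, this prints the same "Binding failed for port ..." message that recurs throughout the entries above and below.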
[ 774.962603] env[62096]: ERROR nova.compute.manager [ 774.962603] env[62096]: Traceback (most recent call last): [ 774.962603] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 774.962603] env[62096]: listener.cb(fileno) [ 774.962603] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 774.962603] env[62096]: result = function(*args, **kwargs) [ 774.962603] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 774.962603] env[62096]: return func(*args, **kwargs) [ 774.962603] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 774.962603] env[62096]: raise e [ 774.962603] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 774.962603] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 774.962603] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 774.962603] env[62096]: created_port_ids = self._update_ports_for_instance( [ 774.962603] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 774.962603] env[62096]: with excutils.save_and_reraise_exception(): [ 774.962603] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 774.962603] env[62096]: self.force_reraise() [ 774.962603] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 774.962603] env[62096]: raise self.value [ 774.962603] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 774.962603] env[62096]: updated_port = self._update_port( [ 774.962603] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 774.962603] env[62096]: _ensure_no_port_binding_failure(port) [ 774.962603] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 774.962603] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 774.963774] env[62096]: nova.exception.PortBindingFailed: Binding failed for port ddbe541a-41cd-49f6-8467-7f80404aec07, please check neutron logs for more information. [ 774.963774] env[62096]: Removing descriptor: 19 [ 774.963774] env[62096]: ERROR nova.compute.manager [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ddbe541a-41cd-49f6-8467-7f80404aec07, please check neutron logs for more information. 
[ 774.963774] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] Traceback (most recent call last): [ 774.963774] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 774.963774] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] yield resources [ 774.963774] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 774.963774] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] self.driver.spawn(context, instance, image_meta, [ 774.963774] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 774.963774] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] self._vmops.spawn(context, instance, image_meta, injected_files, [ 774.963774] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 774.963774] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] vm_ref = self.build_virtual_machine(instance, [ 774.964119] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 774.964119] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] vif_infos = vmwarevif.get_vif_info(self._session, [ 774.964119] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 774.964119] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] for vif in network_info: [ 774.964119] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 774.964119] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] return self._sync_wrapper(fn, *args, **kwargs) [ 774.964119] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 774.964119] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] self.wait() [ 774.964119] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 774.964119] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] self[:] = self._gt.wait() [ 774.964119] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 774.964119] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] return self._exit_event.wait() [ 774.964119] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 774.964451] env[62096]: ERROR 
nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] result = hub.switch() [ 774.964451] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 774.964451] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] return self.greenlet.switch() [ 774.964451] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 774.964451] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] result = function(*args, **kwargs) [ 774.964451] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 774.964451] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] return func(*args, **kwargs) [ 774.964451] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 774.964451] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] raise e [ 774.964451] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 774.964451] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] nwinfo = self.network_api.allocate_for_instance( [ 774.964451] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 774.964451] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] created_port_ids = self._update_ports_for_instance( [ 774.964762] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 774.964762] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] with excutils.save_and_reraise_exception(): [ 774.964762] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 774.964762] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] self.force_reraise() [ 774.964762] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 774.964762] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] raise self.value [ 774.964762] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 774.964762] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] updated_port = self._update_port( [ 774.964762] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 774.964762] 
env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] _ensure_no_port_binding_failure(port) [ 774.964762] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 774.964762] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] raise exception.PortBindingFailed(port_id=port['id']) [ 774.965148] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] nova.exception.PortBindingFailed: Binding failed for port ddbe541a-41cd-49f6-8467-7f80404aec07, please check neutron logs for more information. [ 774.965148] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] [ 774.965148] env[62096]: INFO nova.compute.manager [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] Terminating instance [ 774.965757] env[62096]: DEBUG oslo_concurrency.lockutils [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Acquiring lock "refresh_cache-d7bd302c-1d85-45c2-9a3e-9855a6488d92" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 774.965912] env[62096]: DEBUG oslo_concurrency.lockutils [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Acquired lock "refresh_cache-d7bd302c-1d85-45c2-9a3e-9855a6488d92" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.966202] env[62096]: DEBUG nova.network.neutron [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 774.966657] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Expecting reply to msg 6d1103a6bff0447a915a86bb0b4ba651 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 774.981274] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6d1103a6bff0447a915a86bb0b4ba651 [ 775.027927] env[62096]: DEBUG nova.network.neutron [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 775.028669] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Expecting reply to msg 73d560e9a5a24cafabe685bff4299470 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 775.038620] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 73d560e9a5a24cafabe685bff4299470 [ 775.212512] env[62096]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09181a74-77e7-4cce-82a6-0b5374843f47 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.222371] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25d441ad-0bb0-4366-acc8-2302944c1283 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.253585] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e113babc-77ea-456a-a8a5-8f382f06c175 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.261076] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b1913d4-1718-468b-8f90-3193cb62a093 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.275734] env[62096]: DEBUG nova.compute.provider_tree [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 775.276254] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Expecting reply to msg a78aeee287e14dc4a6a73156dcae1280 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 775.284240] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a78aeee287e14dc4a6a73156dcae1280 [ 775.484219] env[62096]: DEBUG nova.network.neutron [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 775.530756] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Releasing lock "refresh_cache-7bfac3e2-f06c-4690-9215-a5f67a67c5bd" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 775.531071] env[62096]: DEBUG nova.compute.manager [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 775.531279] env[62096]: DEBUG nova.compute.manager [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 775.531452] env[62096]: DEBUG nova.network.neutron [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 775.551229] env[62096]: DEBUG nova.network.neutron [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 775.552513] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Expecting reply to msg f31d958636a44c1d85ad602c7cca543f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 775.560354] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f31d958636a44c1d85ad602c7cca543f [ 775.570160] env[62096]: DEBUG nova.network.neutron [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 775.570160] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Expecting reply to msg 1e0859d9f1f74504bf471fc4c5548cc9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 775.577749] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1e0859d9f1f74504bf471fc4c5548cc9 [ 775.778943] env[62096]: DEBUG nova.scheduler.client.report [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 775.781422] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Expecting reply to msg 2b09d2be5f2a4d9abb1d078ea5ddbdcb in queue 
reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 775.795342] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2b09d2be5f2a4d9abb1d078ea5ddbdcb [ 776.054592] env[62096]: DEBUG nova.network.neutron [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 776.055745] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Expecting reply to msg c36aef8e996a4caf9b64f528507b97b1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 776.064891] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c36aef8e996a4caf9b64f528507b97b1 [ 776.072312] env[62096]: DEBUG oslo_concurrency.lockutils [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Releasing lock "refresh_cache-d7bd302c-1d85-45c2-9a3e-9855a6488d92" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 776.072710] env[62096]: DEBUG nova.compute.manager [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 776.072902] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 776.073412] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f0b3be44-abaf-4cf2-9b1c-9c6aa8f3343b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.083261] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d76ce798-56cf-49e3-989f-540bb5c9e328 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.106493] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d7bd302c-1d85-45c2-9a3e-9855a6488d92 could not be found. 
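Editor's note: the WARNING above, followed by "Instance destroyed" in the next entry, is the vmwareapi driver tolerating a VM that never reached the backend (the port binding failed before spawn completed), so cleanup can continue. A small, self-contained sketch of that tolerate-missing-VM pattern; the session/lookup helpers are hypothetical stand-ins, not the real oslo.vmware or vmops API.

# Illustrative sketch only; helper names are hypothetical, not the Nova/vmops API.
import logging

LOG = logging.getLogger("destroy_sketch")

def lookup_vm_ref(session, instance_uuid):
    """Pretend backend lookup: returns None when vCenter has no such VM."""
    return session.get(instance_uuid)

def destroy_instance(session, instance_uuid):
    """Best-effort destroy that treats a missing backend VM as already gone."""
    vm_ref = lookup_vm_ref(session, instance_uuid)
    if vm_ref is None:
        # Mirrors the WARNING above: note it and fall through to "destroyed"
        # so the compute manager can still deallocate ports and abort the claim.
        LOG.warning("Instance does not exist on backend: %s", instance_uuid)
        return
    session.destroy(vm_ref)  # hypothetical call for the found-VM path

if __name__ == "__main__":
    logging.basicConfig(level=logging.WARNING)
    # Empty fake "session": the UUID is unknown, mirroring the log's scenario.
    destroy_instance({}, "d7bd302c-1d85-45c2-9a3e-9855a6488d92")

With an empty fake session the lookup returns None, the warning is logged, and the function returns, so the network deallocation and resource-claim abort seen in the following entries can still run.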
[ 776.106688] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 776.106864] env[62096]: INFO nova.compute.manager [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] Took 0.03 seconds to destroy the instance on the hypervisor. [ 776.107100] env[62096]: DEBUG oslo.service.loopingcall [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 776.107322] env[62096]: DEBUG nova.compute.manager [-] [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 776.107410] env[62096]: DEBUG nova.network.neutron [-] [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 776.129175] env[62096]: DEBUG nova.network.neutron [-] [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 776.129708] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 7c3306d15e1a4dbd8b445eef9380d967 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 776.136683] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c3306d15e1a4dbd8b445eef9380d967 [ 776.284658] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.930s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 776.285522] env[62096]: ERROR nova.compute.manager [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c7261eb5-f3d8-4ec4-ace3-402cb0fa8f20, please check neutron logs for more information. 
[ 776.285522] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] Traceback (most recent call last): [ 776.285522] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 776.285522] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] self.driver.spawn(context, instance, image_meta, [ 776.285522] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 776.285522] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 776.285522] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 776.285522] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] vm_ref = self.build_virtual_machine(instance, [ 776.285522] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 776.285522] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] vif_infos = vmwarevif.get_vif_info(self._session, [ 776.285522] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 776.285837] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] for vif in network_info: [ 776.285837] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 776.285837] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] return self._sync_wrapper(fn, *args, **kwargs) [ 776.285837] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 776.285837] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] self.wait() [ 776.285837] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 776.285837] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] self[:] = self._gt.wait() [ 776.285837] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 776.285837] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] return self._exit_event.wait() [ 776.285837] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 776.285837] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] result = hub.switch() [ 776.285837] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
776.285837] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] return self.greenlet.switch() [ 776.286152] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 776.286152] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] result = function(*args, **kwargs) [ 776.286152] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 776.286152] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] return func(*args, **kwargs) [ 776.286152] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 776.286152] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] raise e [ 776.286152] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 776.286152] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] nwinfo = self.network_api.allocate_for_instance( [ 776.286152] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 776.286152] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] created_port_ids = self._update_ports_for_instance( [ 776.286152] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 776.286152] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] with excutils.save_and_reraise_exception(): [ 776.286152] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 776.286464] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] self.force_reraise() [ 776.286464] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 776.286464] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] raise self.value [ 776.286464] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 776.286464] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] updated_port = self._update_port( [ 776.286464] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 776.286464] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] _ensure_no_port_binding_failure(port) [ 776.286464] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 776.286464] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] raise exception.PortBindingFailed(port_id=port['id']) [ 776.286464] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] nova.exception.PortBindingFailed: Binding failed for port c7261eb5-f3d8-4ec4-ace3-402cb0fa8f20, please check neutron logs for more information. [ 776.286464] env[62096]: ERROR nova.compute.manager [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] [ 776.286815] env[62096]: DEBUG nova.compute.utils [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] Binding failed for port c7261eb5-f3d8-4ec4-ace3-402cb0fa8f20, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 776.288432] env[62096]: DEBUG oslo_concurrency.lockutils [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.584s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 776.289825] env[62096]: INFO nova.compute.claims [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 776.291379] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg f5ebf94d69dc4340a366465498aa56e8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 776.293888] env[62096]: DEBUG nova.compute.manager [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] Build of instance fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c was re-scheduled: Binding failed for port c7261eb5-f3d8-4ec4-ace3-402cb0fa8f20, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 776.293888] env[62096]: DEBUG nova.compute.manager [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 776.293888] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Acquiring lock "refresh_cache-fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 776.293888] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Acquired lock "refresh_cache-fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.294121] env[62096]: DEBUG nova.network.neutron [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 776.294121] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Expecting reply to msg 8def1dafb2e54e889182715248f26701 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 776.306301] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8def1dafb2e54e889182715248f26701 [ 776.332741] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f5ebf94d69dc4340a366465498aa56e8 [ 776.558529] env[62096]: INFO nova.compute.manager [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] [instance: 7bfac3e2-f06c-4690-9215-a5f67a67c5bd] Took 1.03 seconds to deallocate network for instance. 
[ 776.560270] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Expecting reply to msg ba042e9540394b839a9e62ad1a52bd37 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 776.591426] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ba042e9540394b839a9e62ad1a52bd37 [ 776.632084] env[62096]: DEBUG nova.network.neutron [-] [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 776.632751] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg fb86ada3e82e47339d94588e691d185f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 776.638081] env[62096]: DEBUG nova.compute.manager [req-9e922e0b-8bf9-4cd8-ad62-15d3566bc92d req-0fb11de7-272a-4e12-9761-3173484132d6 service nova] [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] Received event network-changed-ddbe541a-41cd-49f6-8467-7f80404aec07 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 776.638263] env[62096]: DEBUG nova.compute.manager [req-9e922e0b-8bf9-4cd8-ad62-15d3566bc92d req-0fb11de7-272a-4e12-9761-3173484132d6 service nova] [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] Refreshing instance network info cache due to event network-changed-ddbe541a-41cd-49f6-8467-7f80404aec07. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 776.638467] env[62096]: DEBUG oslo_concurrency.lockutils [req-9e922e0b-8bf9-4cd8-ad62-15d3566bc92d req-0fb11de7-272a-4e12-9761-3173484132d6 service nova] Acquiring lock "refresh_cache-d7bd302c-1d85-45c2-9a3e-9855a6488d92" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 776.638600] env[62096]: DEBUG oslo_concurrency.lockutils [req-9e922e0b-8bf9-4cd8-ad62-15d3566bc92d req-0fb11de7-272a-4e12-9761-3173484132d6 service nova] Acquired lock "refresh_cache-d7bd302c-1d85-45c2-9a3e-9855a6488d92" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.638757] env[62096]: DEBUG nova.network.neutron [req-9e922e0b-8bf9-4cd8-ad62-15d3566bc92d req-0fb11de7-272a-4e12-9761-3173484132d6 service nova] [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] Refreshing network info cache for port ddbe541a-41cd-49f6-8467-7f80404aec07 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 776.639354] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-9e922e0b-8bf9-4cd8-ad62-15d3566bc92d req-0fb11de7-272a-4e12-9761-3173484132d6 service nova] Expecting reply to msg 9c3ebda67b2544329282c2c7cfeebddc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 776.642978] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fb86ada3e82e47339d94588e691d185f [ 776.645874] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9c3ebda67b2544329282c2c7cfeebddc [ 776.797296] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 74c8c2cfad9d40ec846aef36d196e41f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 776.804714] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response 
for msg 74c8c2cfad9d40ec846aef36d196e41f [ 776.816091] env[62096]: DEBUG nova.network.neutron [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 776.925585] env[62096]: DEBUG nova.network.neutron [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 776.926316] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Expecting reply to msg 206db21128964b0c9c11b10986095dc8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 776.934837] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 206db21128964b0c9c11b10986095dc8 [ 777.064456] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Expecting reply to msg d6ffcf0d759649ae93c2dea11a62e5c5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 777.098260] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d6ffcf0d759649ae93c2dea11a62e5c5 [ 777.135418] env[62096]: INFO nova.compute.manager [-] [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] Took 1.03 seconds to deallocate network for instance. [ 777.137700] env[62096]: DEBUG nova.compute.claims [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 777.137877] env[62096]: DEBUG oslo_concurrency.lockutils [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 777.159305] env[62096]: DEBUG nova.network.neutron [req-9e922e0b-8bf9-4cd8-ad62-15d3566bc92d req-0fb11de7-272a-4e12-9761-3173484132d6 service nova] [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 777.337356] env[62096]: DEBUG nova.network.neutron [req-9e922e0b-8bf9-4cd8-ad62-15d3566bc92d req-0fb11de7-272a-4e12-9761-3173484132d6 service nova] [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.337891] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-9e922e0b-8bf9-4cd8-ad62-15d3566bc92d req-0fb11de7-272a-4e12-9761-3173484132d6 service nova] Expecting reply to msg b940d1523810477e92e57b94f8d427cf in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 777.345794] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b940d1523810477e92e57b94f8d427cf [ 777.429124] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Releasing lock "refresh_cache-fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 777.429369] env[62096]: DEBUG nova.compute.manager [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 777.429746] env[62096]: DEBUG nova.compute.manager [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 777.429988] env[62096]: DEBUG nova.network.neutron [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 777.450658] env[62096]: DEBUG nova.network.neutron [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 777.451152] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Expecting reply to msg bb65817cdc1a4a38a203ef386c5421ad in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 777.458580] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bb65817cdc1a4a38a203ef386c5421ad [ 777.583357] env[62096]: INFO nova.scheduler.client.report [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Deleted allocations for instance 7bfac3e2-f06c-4690-9215-a5f67a67c5bd [ 777.594805] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Expecting reply to msg 98b955ce1b0f4322b3d16fc98324513c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 777.603617] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 98b955ce1b0f4322b3d16fc98324513c [ 777.691376] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65274ede-fd9c-4768-a18f-80046ae9e91a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.698814] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-957681c2-afe7-4356-b72f-ba18ccc4f1cd {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.730800] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b4101e9-7a93-400e-b676-0076f790a9e8 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.738362] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7504b2a9-0cbe-4e5d-af70-686d8b7a2fe7 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.753275] env[62096]: DEBUG nova.compute.provider_tree [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 777.753464] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 780bbc7232364e3594df206e9678a9d3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 777.760688] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 780bbc7232364e3594df206e9678a9d3 [ 777.840607] env[62096]: DEBUG oslo_concurrency.lockutils [req-9e922e0b-8bf9-4cd8-ad62-15d3566bc92d req-0fb11de7-272a-4e12-9761-3173484132d6 service nova] Releasing lock "refresh_cache-d7bd302c-1d85-45c2-9a3e-9855a6488d92" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 777.840888] env[62096]: DEBUG nova.compute.manager 
[req-9e922e0b-8bf9-4cd8-ad62-15d3566bc92d req-0fb11de7-272a-4e12-9761-3173484132d6 service nova] [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] Received event network-vif-deleted-ddbe541a-41cd-49f6-8467-7f80404aec07 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 777.953297] env[62096]: DEBUG nova.network.neutron [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.953882] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Expecting reply to msg ce63917c3c0b47a4884abc6d1c442871 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 777.963630] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ce63917c3c0b47a4884abc6d1c442871 [ 778.092835] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f7b2ac44-6793-4b58-b560-5a800041e557 tempest-MigrationsAdminTest-2139255800 tempest-MigrationsAdminTest-2139255800-project-member] Lock "7bfac3e2-f06c-4690-9215-a5f67a67c5bd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 154.529s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 778.093434] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 4f1ce9a948af40b7905547b913d30f62 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 778.103372] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4f1ce9a948af40b7905547b913d30f62 [ 778.256345] env[62096]: DEBUG nova.scheduler.client.report [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 778.258647] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 2d774b0b955a49688361ec80597d70bd in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 778.270987] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2d774b0b955a49688361ec80597d70bd [ 778.456649] env[62096]: INFO nova.compute.manager [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] [instance: fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c] Took 1.03 seconds to 
deallocate network for instance. [ 778.458364] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Expecting reply to msg fa3fae4fd9c04f08b1a126cfc4453607 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 778.495756] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fa3fae4fd9c04f08b1a126cfc4453607 [ 778.598612] env[62096]: DEBUG nova.compute.manager [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 778.598612] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg b321ae80b2b84ab8ac6303f5fdef9641 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 778.652703] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b321ae80b2b84ab8ac6303f5fdef9641 [ 778.765456] env[62096]: DEBUG oslo_concurrency.lockutils [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.473s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 778.765456] env[62096]: DEBUG nova.compute.manager [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 778.765456] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 1fbb97de136b4567b3354e3037404a57 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 778.768203] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.188s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 778.768203] env[62096]: INFO nova.compute.claims [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] [instance: e8631ec4-1823-46d2-8553-05e3336fed32] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 778.770114] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Expecting reply to msg c0cd8a6f73744fd7bb6567e773553af5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 778.796613] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1fbb97de136b4567b3354e3037404a57 [ 778.825675] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c0cd8a6f73744fd7bb6567e773553af5 [ 778.964745] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Expecting reply to msg 1d9fdb1f7a414d12b410468865822a30 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 779.028307] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1d9fdb1f7a414d12b410468865822a30 [ 779.117965] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 779.268190] env[62096]: DEBUG nova.compute.utils [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 779.268869] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 3e07725b6a664ecf9f4ee16e19c95f06 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 779.270391] env[62096]: DEBUG nova.compute.manager [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 779.270561] env[62096]: DEBUG nova.network.neutron [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 779.273886] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Expecting reply to msg e7ab0cf361004b71ae6ab547596a60ad in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 779.284059] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e07725b6a664ecf9f4ee16e19c95f06 [ 779.286918] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e7ab0cf361004b71ae6ab547596a60ad [ 779.367711] env[62096]: DEBUG nova.policy [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eccf4b6b1d2b496796ad12d6caad16ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '53edc9a849714cedab5fcd7b03ca6916', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 779.488857] env[62096]: INFO nova.scheduler.client.report [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Deleted allocations for instance fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c [ 779.494887] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Expecting reply to msg e8d8afada1fc4a7592da2ea84914b539 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 779.510965] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e8d8afada1fc4a7592da2ea84914b539 [ 779.769090] env[62096]: DEBUG nova.network.neutron [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] Successfully created port: b29ebaaf-3fa3-40c7-86c7-fc7395ca11ff {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 779.771235] env[62096]: DEBUG nova.compute.manager [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 779.774059] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 1dc61d0b7f394b7fb6382fc616581216 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 779.808124] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1dc61d0b7f394b7fb6382fc616581216 [ 779.997383] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5fe8118b-4ebb-4166-ab18-d024b5229d8a tempest-ServerRescueTestJSONUnderV235-1957248754 tempest-ServerRescueTestJSONUnderV235-1957248754-project-member] Lock "fc70759e-1b1c-45a6-b41a-4d6c8c1a3c0c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 155.577s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 779.998700] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 04cf08e1ef504d46889759671b46dd4f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 780.017905] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 04cf08e1ef504d46889759671b46dd4f [ 780.116905] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59cbf8a6-3a84-416e-aa36-1f6805fc317d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.124414] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02c758bb-464d-4e77-9155-95823972df93 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.156031] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e981278-d547-4db9-b881-c0a11fdaedec {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.163032] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aed2c5b-c131-421f-8ab7-dce6bc338340 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.176447] env[62096]: DEBUG nova.compute.provider_tree [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 780.176942] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Expecting reply to msg fbf20198935b416a803bba6cd9c6111c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 780.185877] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fbf20198935b416a803bba6cd9c6111c [ 780.281016] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 
tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg cbcf616f0f464dd6a6305dadc0455048 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 780.313868] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cbcf616f0f464dd6a6305dadc0455048 [ 780.500365] env[62096]: DEBUG nova.compute.manager [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 780.502057] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 491f6c7bd6bf417bbc465a051aadc0df in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 780.542238] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 491f6c7bd6bf417bbc465a051aadc0df [ 780.686641] env[62096]: DEBUG nova.scheduler.client.report [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 780.689221] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Expecting reply to msg ed7bb6529d39476397295dadde4b38b3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 780.705737] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ed7bb6529d39476397295dadde4b38b3 [ 780.792153] env[62096]: DEBUG nova.compute.manager [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 780.814302] env[62096]: DEBUG nova.virt.hardware [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 780.814543] env[62096]: DEBUG nova.virt.hardware [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 780.814691] env[62096]: DEBUG nova.virt.hardware [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 780.814869] env[62096]: DEBUG nova.virt.hardware [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 780.815005] env[62096]: DEBUG nova.virt.hardware [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 780.815144] env[62096]: DEBUG nova.virt.hardware [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 780.815344] env[62096]: DEBUG nova.virt.hardware [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 780.815495] env[62096]: DEBUG nova.virt.hardware [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 780.815651] env[62096]: DEBUG nova.virt.hardware [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Got 1 possible 
topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 780.815804] env[62096]: DEBUG nova.virt.hardware [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 780.815992] env[62096]: DEBUG nova.virt.hardware [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 780.816916] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34ef139b-aac2-4fe1-bc70-b222093d7d15 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.826464] env[62096]: DEBUG oslo_concurrency.lockutils [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Acquiring lock "5627f913-29d2-476e-8fde-8ea457cc56f2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 780.826681] env[62096]: DEBUG oslo_concurrency.lockutils [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Lock "5627f913-29d2-476e-8fde-8ea457cc56f2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 780.828035] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05f50edf-b37b-4865-b831-3e39be35e2f9 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.902313] env[62096]: DEBUG nova.compute.manager [req-763b1b43-03a0-487a-bb31-717ac5dc9a78 req-3330c697-6661-4351-ad92-3c9b31171abe service nova] [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] Received event network-changed-b29ebaaf-3fa3-40c7-86c7-fc7395ca11ff {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 780.902313] env[62096]: DEBUG nova.compute.manager [req-763b1b43-03a0-487a-bb31-717ac5dc9a78 req-3330c697-6661-4351-ad92-3c9b31171abe service nova] [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] Refreshing instance network info cache due to event network-changed-b29ebaaf-3fa3-40c7-86c7-fc7395ca11ff. 
{{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 780.902313] env[62096]: DEBUG oslo_concurrency.lockutils [req-763b1b43-03a0-487a-bb31-717ac5dc9a78 req-3330c697-6661-4351-ad92-3c9b31171abe service nova] Acquiring lock "refresh_cache-cffc0ebc-4fb1-47c9-8882-b8431046ef2f" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 780.902498] env[62096]: DEBUG oslo_concurrency.lockutils [req-763b1b43-03a0-487a-bb31-717ac5dc9a78 req-3330c697-6661-4351-ad92-3c9b31171abe service nova] Acquired lock "refresh_cache-cffc0ebc-4fb1-47c9-8882-b8431046ef2f" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.902568] env[62096]: DEBUG nova.network.neutron [req-763b1b43-03a0-487a-bb31-717ac5dc9a78 req-3330c697-6661-4351-ad92-3c9b31171abe service nova] [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] Refreshing network info cache for port b29ebaaf-3fa3-40c7-86c7-fc7395ca11ff {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 780.903151] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-763b1b43-03a0-487a-bb31-717ac5dc9a78 req-3330c697-6661-4351-ad92-3c9b31171abe service nova] Expecting reply to msg fd83641c03c04cc0a5643af538985e18 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 780.910283] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fd83641c03c04cc0a5643af538985e18 [ 780.970957] env[62096]: ERROR nova.compute.manager [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b29ebaaf-3fa3-40c7-86c7-fc7395ca11ff, please check neutron logs for more information. 
[ 780.970957] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 780.970957] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 780.970957] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 780.970957] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 780.970957] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 780.970957] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 780.970957] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 780.970957] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 780.970957] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 780.970957] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 780.970957] env[62096]: ERROR nova.compute.manager raise self.value [ 780.970957] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 780.970957] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 780.970957] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 780.970957] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 780.971685] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 780.971685] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 780.971685] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b29ebaaf-3fa3-40c7-86c7-fc7395ca11ff, please check neutron logs for more information. 
[ 780.971685] env[62096]: ERROR nova.compute.manager [ 780.971685] env[62096]: Traceback (most recent call last): [ 780.971685] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 780.971685] env[62096]: listener.cb(fileno) [ 780.971685] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 780.971685] env[62096]: result = function(*args, **kwargs) [ 780.971685] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 780.971685] env[62096]: return func(*args, **kwargs) [ 780.971685] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 780.971685] env[62096]: raise e [ 780.971685] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 780.971685] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 780.971685] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 780.971685] env[62096]: created_port_ids = self._update_ports_for_instance( [ 780.971685] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 780.971685] env[62096]: with excutils.save_and_reraise_exception(): [ 780.971685] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 780.971685] env[62096]: self.force_reraise() [ 780.971685] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 780.971685] env[62096]: raise self.value [ 780.971685] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 780.971685] env[62096]: updated_port = self._update_port( [ 780.971685] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 780.971685] env[62096]: _ensure_no_port_binding_failure(port) [ 780.971685] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 780.971685] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 780.972960] env[62096]: nova.exception.PortBindingFailed: Binding failed for port b29ebaaf-3fa3-40c7-86c7-fc7395ca11ff, please check neutron logs for more information. [ 780.972960] env[62096]: Removing descriptor: 16 [ 780.972960] env[62096]: ERROR nova.compute.manager [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b29ebaaf-3fa3-40c7-86c7-fc7395ca11ff, please check neutron logs for more information. 
[ 780.972960] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] Traceback (most recent call last): [ 780.972960] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 780.972960] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] yield resources [ 780.972960] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 780.972960] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] self.driver.spawn(context, instance, image_meta, [ 780.972960] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 780.972960] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 780.972960] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 780.972960] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] vm_ref = self.build_virtual_machine(instance, [ 780.973523] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 780.973523] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] vif_infos = vmwarevif.get_vif_info(self._session, [ 780.973523] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 780.973523] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] for vif in network_info: [ 780.973523] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 780.973523] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] return self._sync_wrapper(fn, *args, **kwargs) [ 780.973523] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 780.973523] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] self.wait() [ 780.973523] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 780.973523] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] self[:] = self._gt.wait() [ 780.973523] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 780.973523] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] return self._exit_event.wait() [ 780.973523] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 780.974287] env[62096]: ERROR 
nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] result = hub.switch() [ 780.974287] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 780.974287] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] return self.greenlet.switch() [ 780.974287] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 780.974287] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] result = function(*args, **kwargs) [ 780.974287] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 780.974287] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] return func(*args, **kwargs) [ 780.974287] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 780.974287] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] raise e [ 780.974287] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 780.974287] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] nwinfo = self.network_api.allocate_for_instance( [ 780.974287] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 780.974287] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] created_port_ids = self._update_ports_for_instance( [ 780.974980] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 780.974980] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] with excutils.save_and_reraise_exception(): [ 780.974980] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 780.974980] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] self.force_reraise() [ 780.974980] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 780.974980] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] raise self.value [ 780.974980] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 780.974980] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] updated_port = self._update_port( [ 780.974980] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 780.974980] 
env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] _ensure_no_port_binding_failure(port) [ 780.974980] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 780.974980] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] raise exception.PortBindingFailed(port_id=port['id']) [ 780.975504] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] nova.exception.PortBindingFailed: Binding failed for port b29ebaaf-3fa3-40c7-86c7-fc7395ca11ff, please check neutron logs for more information. [ 780.975504] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] [ 780.975504] env[62096]: INFO nova.compute.manager [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] Terminating instance [ 780.976210] env[62096]: DEBUG oslo_concurrency.lockutils [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Acquiring lock "refresh_cache-cffc0ebc-4fb1-47c9-8882-b8431046ef2f" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 781.018728] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 781.191302] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.425s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 781.191812] env[62096]: DEBUG nova.compute.manager [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] [instance: e8631ec4-1823-46d2-8553-05e3336fed32] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 781.193552] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Expecting reply to msg 2da037a2c4a94670af2f19615b2425da in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 781.194518] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 15.721s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 781.195241] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 517c05ee9d11460fb99e3ba9fff1f7bd in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 781.218274] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 517c05ee9d11460fb99e3ba9fff1f7bd [ 781.222763] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2da037a2c4a94670af2f19615b2425da [ 781.418522] env[62096]: DEBUG nova.network.neutron [req-763b1b43-03a0-487a-bb31-717ac5dc9a78 req-3330c697-6661-4351-ad92-3c9b31171abe service nova] [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 781.516938] env[62096]: DEBUG nova.network.neutron [req-763b1b43-03a0-487a-bb31-717ac5dc9a78 req-3330c697-6661-4351-ad92-3c9b31171abe service nova] [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 781.517448] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-763b1b43-03a0-487a-bb31-717ac5dc9a78 req-3330c697-6661-4351-ad92-3c9b31171abe service nova] Expecting reply to msg 02abcd2afab24aa0b616e409d1eb56af in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 781.526412] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 02abcd2afab24aa0b616e409d1eb56af [ 781.698101] env[62096]: DEBUG nova.compute.utils [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 781.698739] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Expecting reply to msg 71bf3bab65fa41338496b4859a3bbc9c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 781.701784] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 779d630f3f95451ab79f77da1fbda477 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 781.702378] env[62096]: DEBUG nova.compute.manager [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] [instance: e8631ec4-1823-46d2-8553-05e3336fed32] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 781.702533] env[62096]: DEBUG nova.network.neutron [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] [instance: e8631ec4-1823-46d2-8553-05e3336fed32] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 781.711614] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 71bf3bab65fa41338496b4859a3bbc9c [ 781.712029] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 779d630f3f95451ab79f77da1fbda477 [ 781.795945] env[62096]: DEBUG nova.policy [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '696db86c757f4bdba20585e97f6c0d55', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1436bbce2b034787872598eec0906da2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 782.020536] env[62096]: DEBUG oslo_concurrency.lockutils [req-763b1b43-03a0-487a-bb31-717ac5dc9a78 req-3330c697-6661-4351-ad92-3c9b31171abe service nova] Releasing lock "refresh_cache-cffc0ebc-4fb1-47c9-8882-b8431046ef2f" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 782.020536] env[62096]: DEBUG oslo_concurrency.lockutils [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Acquired lock "refresh_cache-cffc0ebc-4fb1-47c9-8882-b8431046ef2f" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 782.020837] env[62096]: DEBUG nova.network.neutron [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 782.021123] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 42330d7650264121b9164e59da7f6d36 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 782.030876] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 42330d7650264121b9164e59da7f6d36 [ 782.205226] env[62096]: DEBUG nova.compute.manager [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] [instance: e8631ec4-1823-46d2-8553-05e3336fed32] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 782.205830] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Expecting reply to msg 81b647290adf415497108e5ea15d60d0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 782.230172] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 510189aa-204c-4fd6-90d5-47a7ce5f7630 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 782.230334] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance de88f8a1-20a5-49f9-adcb-de48aeaa548a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 782.230456] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance d7bd302c-1d85-45c2-9a3e-9855a6488d92 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 782.230575] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance cffc0ebc-4fb1-47c9-8882-b8431046ef2f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 782.230686] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance e8631ec4-1823-46d2-8553-05e3336fed32 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 782.231234] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 26aed4a7d5e9408a9bccd652d6995be3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 782.241337] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 81b647290adf415497108e5ea15d60d0 [ 782.249223] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 26aed4a7d5e9408a9bccd652d6995be3 [ 782.372542] env[62096]: DEBUG nova.network.neutron [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] [instance: e8631ec4-1823-46d2-8553-05e3336fed32] Successfully created port: 89fd1d75-07b4-4483-9ab1-b63973730b8e {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 782.547559] env[62096]: DEBUG nova.network.neutron [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 782.645366] env[62096]: DEBUG nova.network.neutron [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 782.645872] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 67dbc7473a454edcb72cd667c6ba9229 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 782.655496] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 67dbc7473a454edcb72cd667c6ba9229 [ 782.716617] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Expecting reply to msg 7d9c662ddd4f49ea951fdccc8b5d3155 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 782.733665] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance f050f0de-dc84-4825-b490-eafe522354cd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 782.734282] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg f5bdf2294692433b94f9ec1ab856ff91 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 782.744766] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f5bdf2294692433b94f9ec1ab856ff91 [ 782.760184] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7d9c662ddd4f49ea951fdccc8b5d3155 [ 783.089455] env[62096]: DEBUG nova.compute.manager [req-7a08582b-bcc7-43ae-b969-9a796b3d7d45 req-a6032e96-b664-4126-8502-78c4cfb97dd4 service nova] [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] Received event network-vif-deleted-b29ebaaf-3fa3-40c7-86c7-fc7395ca11ff {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 783.156040] env[62096]: DEBUG oslo_concurrency.lockutils [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Releasing lock "refresh_cache-cffc0ebc-4fb1-47c9-8882-b8431046ef2f" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 783.156490] env[62096]: DEBUG nova.compute.manager [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 783.156653] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 783.156940] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8f0ccadd-4152-417b-9d3c-6a47685fd931 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.165580] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c193ef42-debb-4314-a297-d4e22a1c0a17 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.185977] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance cffc0ebc-4fb1-47c9-8882-b8431046ef2f could not be found. [ 783.186182] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 783.186430] env[62096]: INFO nova.compute.manager [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] Took 0.03 seconds to destroy the instance on the hypervisor. 
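The WARNING "Instance does not exist on backend" followed immediately by "Instance destroyed" reflects a common cleanup idiom: spawn failed before any VM was created in vCenter, so the destroy path treats a missing backend VM as already destroyed and carries on. A hedged, runnable sketch of that idiom; the exception class and function names here are invented for illustration, not Nova's own.

    import logging

    logging.basicConfig(level=logging.DEBUG)
    LOG = logging.getLogger(__name__)


    class BackendInstanceNotFound(Exception):
        """Illustrative stand-in for nova.exception.InstanceNotFound."""


    def destroy_on_backend(instance_uuid: str) -> None:
        # Stand-in for the hypervisor call; the VM was never created
        # because spawn failed while allocating the network.
        raise BackendInstanceNotFound(
            f"Instance {instance_uuid} could not be found.")


    def destroy(instance_uuid: str) -> None:
        # Treat "not found on the backend" as already destroyed so the
        # cleanup path can continue deallocating network and claims.
        try:
            destroy_on_backend(instance_uuid)
        except BackendInstanceNotFound as exc:
            LOG.warning("Instance does not exist on backend: %s", exc)
        LOG.debug("Instance destroyed")


    destroy("cffc0ebc-4fb1-47c9-8882-b8431046ef2f")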
[ 783.186680] env[62096]: DEBUG oslo.service.loopingcall [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 783.186883] env[62096]: DEBUG nova.compute.manager [-] [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 783.186971] env[62096]: DEBUG nova.network.neutron [-] [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 783.211228] env[62096]: DEBUG nova.network.neutron [-] [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 783.211783] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 8f9aba2c4a974bb1b628845faf5b0cf4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 783.218837] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8f9aba2c4a974bb1b628845faf5b0cf4 [ 783.220164] env[62096]: DEBUG nova.compute.manager [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] [instance: e8631ec4-1823-46d2-8553-05e3336fed32] Start spawning the instance on the hypervisor. {{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 783.236676] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 3283ae52-1a1f-4dec-91f3-44cc42361bb5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 783.237512] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 7b3654bc27a8445f9430e9c34013c749 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 783.256963] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7b3654bc27a8445f9430e9c34013c749 [ 783.258061] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance c928bf83-9517-449a-854c-6f3d8ce4faa0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 783.258608] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 431d41aa8ebd47e689432f3b1fb873bf in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 783.267869] env[62096]: DEBUG nova.virt.hardware [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 783.267869] env[62096]: DEBUG nova.virt.hardware [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 783.267869] env[62096]: DEBUG nova.virt.hardware [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 783.267998] env[62096]: DEBUG nova.virt.hardware [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 783.267998] env[62096]: DEBUG nova.virt.hardware [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 783.267998] env[62096]: DEBUG nova.virt.hardware [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 783.267998] env[62096]: DEBUG nova.virt.hardware [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 783.267998] env[62096]: DEBUG nova.virt.hardware [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 
tempest-TenantUsagesTestJSON-1395762051-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 783.268160] env[62096]: DEBUG nova.virt.hardware [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 783.268160] env[62096]: DEBUG nova.virt.hardware [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 783.268160] env[62096]: DEBUG nova.virt.hardware [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 783.268304] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0af1bd8d-03b9-4e78-97ff-ea43ad13353b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.271100] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 431d41aa8ebd47e689432f3b1fb873bf [ 783.280031] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43cc058a-1952-482e-98f8-0d27b590bde6 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.714340] env[62096]: DEBUG nova.network.neutron [-] [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.714833] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 60f708fcdeb944319f7fd13ba2ddef12 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 783.725223] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 60f708fcdeb944319f7fd13ba2ddef12 [ 783.765605] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance a5b5afc7-abc0-430a-b682-2c1946b4a6d8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 783.766195] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 2ffd2e1759394bbb8d2a20a4412a9e6a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 783.779912] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2ffd2e1759394bbb8d2a20a4412a9e6a [ 784.133196] env[62096]: ERROR nova.compute.manager [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 89fd1d75-07b4-4483-9ab1-b63973730b8e, please check neutron logs for more information. [ 784.133196] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 784.133196] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 784.133196] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 784.133196] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 784.133196] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 784.133196] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 784.133196] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 784.133196] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 784.133196] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 784.133196] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 784.133196] env[62096]: ERROR nova.compute.manager raise self.value [ 784.133196] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 784.133196] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 784.133196] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 784.133196] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 784.133866] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 784.133866] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 784.133866] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 89fd1d75-07b4-4483-9ab1-b63973730b8e, please check neutron logs for more information. 
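Both PortBindingFailed tracebacks pass through oslo_utils.excutils.save_and_reraise_exception(), the context manager that lets _update_ports_for_instance run cleanup for partially created ports and still re-raise the original error. A small hedged example of how that context manager is typically used (assumes oslo.utils is installed; the surrounding function and its cleanup body are invented for illustration):

    from oslo_utils import excutils


    def update_ports(ports):
        created = []
        try:
            for port in ports:
                if port.get('binding:vif_type') == 'binding_failed':
                    raise RuntimeError(f"binding failed for {port['id']}")
                created.append(port['id'])
        except Exception:
            # On leaving this block the original exception is re-raised,
            # even though cleanup code ran (and even if it logged errors).
            with excutils.save_and_reraise_exception():
                print(f"cleaning up partially created ports: {created}")


    try:
        update_ports([{'id': 'ok-port', 'binding:vif_type': 'ovs'},
                      {'id': 'bad-port', 'binding:vif_type': 'binding_failed'}])
    except RuntimeError as exc:
        print(f"re-raised: {exc}")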
[ 784.133866] env[62096]: ERROR nova.compute.manager [ 784.133866] env[62096]: Traceback (most recent call last): [ 784.133866] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 784.133866] env[62096]: listener.cb(fileno) [ 784.133866] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 784.133866] env[62096]: result = function(*args, **kwargs) [ 784.133866] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 784.133866] env[62096]: return func(*args, **kwargs) [ 784.133866] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 784.133866] env[62096]: raise e [ 784.133866] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 784.133866] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 784.133866] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 784.133866] env[62096]: created_port_ids = self._update_ports_for_instance( [ 784.133866] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 784.133866] env[62096]: with excutils.save_and_reraise_exception(): [ 784.133866] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 784.133866] env[62096]: self.force_reraise() [ 784.133866] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 784.133866] env[62096]: raise self.value [ 784.133866] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 784.133866] env[62096]: updated_port = self._update_port( [ 784.133866] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 784.133866] env[62096]: _ensure_no_port_binding_failure(port) [ 784.133866] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 784.133866] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 784.134829] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 89fd1d75-07b4-4483-9ab1-b63973730b8e, please check neutron logs for more information. [ 784.134829] env[62096]: Removing descriptor: 16 [ 784.134829] env[62096]: ERROR nova.compute.manager [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] [instance: e8631ec4-1823-46d2-8553-05e3336fed32] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 89fd1d75-07b4-4483-9ab1-b63973730b8e, please check neutron logs for more information. 
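The hub.switch()/greenthread frames show why the failure surfaces inside the VMware spawn path rather than where the port was created: allocate_for_instance runs in an eventlet greenthread, and the exception is only re-raised when spawn later waits on the deferred network_info. A stripped-down illustration of that deferred pattern using eventlet directly (assumes eventlet is installed; allocate_network is an invented stand-in, not Nova's method):

    import eventlet


    class PortBindingFailed(Exception):
        pass


    def allocate_network(port_id):
        # Simulate Neutron reporting a failed binding for this port.
        raise PortBindingFailed(f"Binding failed for port {port_id}")


    # Kick off allocation in the background while the guest keeps building...
    gt = eventlet.spawn(allocate_network,
                        '89fd1d75-07b4-4483-9ab1-b63973730b8e')

    # ...and only when the VIF info is actually needed does wait() re-raise
    # the greenthread's exception, which is the shape of the traceback above.
    try:
        network_info = gt.wait()
    except PortBindingFailed as exc:
        print(f"spawn fails here: {exc}")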
[ 784.134829] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] Traceback (most recent call last): [ 784.134829] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 784.134829] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] yield resources [ 784.134829] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 784.134829] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] self.driver.spawn(context, instance, image_meta, [ 784.134829] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 784.134829] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] self._vmops.spawn(context, instance, image_meta, injected_files, [ 784.134829] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 784.134829] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] vm_ref = self.build_virtual_machine(instance, [ 784.135201] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 784.135201] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] vif_infos = vmwarevif.get_vif_info(self._session, [ 784.135201] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 784.135201] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] for vif in network_info: [ 784.135201] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 784.135201] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] return self._sync_wrapper(fn, *args, **kwargs) [ 784.135201] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 784.135201] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] self.wait() [ 784.135201] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 784.135201] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] self[:] = self._gt.wait() [ 784.135201] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 784.135201] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] return self._exit_event.wait() [ 784.135201] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 784.135661] env[62096]: ERROR 
nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] result = hub.switch() [ 784.135661] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 784.135661] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] return self.greenlet.switch() [ 784.135661] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 784.135661] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] result = function(*args, **kwargs) [ 784.135661] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 784.135661] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] return func(*args, **kwargs) [ 784.135661] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 784.135661] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] raise e [ 784.135661] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 784.135661] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] nwinfo = self.network_api.allocate_for_instance( [ 784.135661] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 784.135661] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] created_port_ids = self._update_ports_for_instance( [ 784.136049] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 784.136049] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] with excutils.save_and_reraise_exception(): [ 784.136049] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 784.136049] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] self.force_reraise() [ 784.136049] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 784.136049] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] raise self.value [ 784.136049] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 784.136049] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] updated_port = self._update_port( [ 784.136049] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 784.136049] 
env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] _ensure_no_port_binding_failure(port) [ 784.136049] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 784.136049] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] raise exception.PortBindingFailed(port_id=port['id']) [ 784.136440] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] nova.exception.PortBindingFailed: Binding failed for port 89fd1d75-07b4-4483-9ab1-b63973730b8e, please check neutron logs for more information. [ 784.136440] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] [ 784.136440] env[62096]: INFO nova.compute.manager [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] [instance: e8631ec4-1823-46d2-8553-05e3336fed32] Terminating instance [ 784.136440] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Acquiring lock "refresh_cache-e8631ec4-1823-46d2-8553-05e3336fed32" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 784.136578] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Acquired lock "refresh_cache-e8631ec4-1823-46d2-8553-05e3336fed32" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 784.136739] env[62096]: DEBUG nova.network.neutron [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] [instance: e8631ec4-1823-46d2-8553-05e3336fed32] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 784.137245] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Expecting reply to msg 8e5e076b9de24115b24fe59fa8a75603 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 784.143997] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8e5e076b9de24115b24fe59fa8a75603 [ 784.217636] env[62096]: INFO nova.compute.manager [-] [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] Took 1.03 seconds to deallocate network for instance. 
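The many Acquiring/Releasing lines for locks such as "refresh_cache-<uuid>" and "compute_resources" come from oslo_concurrency.lockutils, which serializes access to the per-instance network-info cache and to resource-tracker updates. A minimal hedged example of those primitives (assumes oslo.concurrency is installed; the bodies are illustrative only, not Nova's code):

    from oslo_concurrency import lockutils

    # Context-manager form, as used around the instance network-info cache.
    instance_uuid = 'cffc0ebc-4fb1-47c9-8882-b8431046ef2f'
    with lockutils.lock(f'refresh_cache-{instance_uuid}'):
        # rebuild / refresh the cached network_info here
        print('refreshing instance network info cache under the lock')


    # Decorator form, as used to serialize resource-tracker operations.
    @lockutils.synchronized('compute_resources')
    def instance_claim():
        print('claiming compute resources under the lock')


    instance_claim()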
[ 784.219379] env[62096]: DEBUG nova.compute.claims [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 784.219562] env[62096]: DEBUG oslo_concurrency.lockutils [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 784.268508] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 4ce3f48e-e45c-4628-8c35-8493c655a6f3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 784.269074] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 9cd02b0025994465adc1f541022dd8c9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 784.279452] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9cd02b0025994465adc1f541022dd8c9 [ 784.659390] env[62096]: DEBUG nova.network.neutron [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] [instance: e8631ec4-1823-46d2-8553-05e3336fed32] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 784.707036] env[62096]: DEBUG nova.network.neutron [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] [instance: e8631ec4-1823-46d2-8553-05e3336fed32] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.707537] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Expecting reply to msg 569b2d14aea04c3a87c1f791ef61f47b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 784.716249] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 569b2d14aea04c3a87c1f791ef61f47b [ 784.771238] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 7a85b800-725c-4d91-90bd-2056eb2fb116 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 784.771809] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 2628da32bf0b40ae93bde4aa09c3949a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 784.782556] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2628da32bf0b40ae93bde4aa09c3949a [ 785.123913] env[62096]: DEBUG nova.compute.manager [req-7d6da94c-9f42-4e69-b0fc-51a301a6a69b req-b0ecbecc-380f-403b-8cca-79a2fcd4b7c9 service nova] [instance: e8631ec4-1823-46d2-8553-05e3336fed32] Received event network-changed-89fd1d75-07b4-4483-9ab1-b63973730b8e {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 785.124221] env[62096]: DEBUG nova.compute.manager [req-7d6da94c-9f42-4e69-b0fc-51a301a6a69b req-b0ecbecc-380f-403b-8cca-79a2fcd4b7c9 service nova] [instance: e8631ec4-1823-46d2-8553-05e3336fed32] Refreshing instance network info cache due to event network-changed-89fd1d75-07b4-4483-9ab1-b63973730b8e. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 785.124406] env[62096]: DEBUG oslo_concurrency.lockutils [req-7d6da94c-9f42-4e69-b0fc-51a301a6a69b req-b0ecbecc-380f-403b-8cca-79a2fcd4b7c9 service nova] Acquiring lock "refresh_cache-e8631ec4-1823-46d2-8553-05e3336fed32" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 785.209461] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Releasing lock "refresh_cache-e8631ec4-1823-46d2-8553-05e3336fed32" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 785.209935] env[62096]: DEBUG nova.compute.manager [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] [instance: e8631ec4-1823-46d2-8553-05e3336fed32] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 785.210124] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] [instance: e8631ec4-1823-46d2-8553-05e3336fed32] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 785.210434] env[62096]: DEBUG oslo_concurrency.lockutils [req-7d6da94c-9f42-4e69-b0fc-51a301a6a69b req-b0ecbecc-380f-403b-8cca-79a2fcd4b7c9 service nova] Acquired lock "refresh_cache-e8631ec4-1823-46d2-8553-05e3336fed32" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.210595] env[62096]: DEBUG nova.network.neutron [req-7d6da94c-9f42-4e69-b0fc-51a301a6a69b req-b0ecbecc-380f-403b-8cca-79a2fcd4b7c9 service nova] [instance: e8631ec4-1823-46d2-8553-05e3336fed32] Refreshing network info cache for port 89fd1d75-07b4-4483-9ab1-b63973730b8e {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 785.211024] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-7d6da94c-9f42-4e69-b0fc-51a301a6a69b req-b0ecbecc-380f-403b-8cca-79a2fcd4b7c9 service nova] Expecting reply to msg aab5873a3017443f808ae4c63e799614 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 785.211880] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6dda4b29-c89d-4052-b134-f8fbdb3118ad {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.218850] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aab5873a3017443f808ae4c63e799614 [ 785.222416] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08e3d923-92b2-4d9e-bf8b-b0f5b4133769 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.243907] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] [instance: e8631ec4-1823-46d2-8553-05e3336fed32] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e8631ec4-1823-46d2-8553-05e3336fed32 could not be found. [ 785.244221] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] [instance: e8631ec4-1823-46d2-8553-05e3336fed32] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 785.244401] env[62096]: INFO nova.compute.manager [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] [instance: e8631ec4-1823-46d2-8553-05e3336fed32] Took 0.03 seconds to destroy the instance on the hypervisor. [ 785.244636] env[62096]: DEBUG oslo.service.loopingcall [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 785.245039] env[62096]: DEBUG nova.compute.manager [-] [instance: e8631ec4-1823-46d2-8553-05e3336fed32] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 785.245039] env[62096]: DEBUG nova.network.neutron [-] [instance: e8631ec4-1823-46d2-8553-05e3336fed32] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 785.258604] env[62096]: DEBUG nova.network.neutron [-] [instance: e8631ec4-1823-46d2-8553-05e3336fed32] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 785.259106] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg f0570c6c8bb54f138e49545b318000d8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 785.266139] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f0570c6c8bb54f138e49545b318000d8 [ 785.274703] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 46232e88-ad63-42bc-bf51-2a0758e6ec3a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 785.275312] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 19f227aa8af646d28b8f83159ebf544a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 785.285898] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 19f227aa8af646d28b8f83159ebf544a [ 785.730524] env[62096]: DEBUG nova.network.neutron [req-7d6da94c-9f42-4e69-b0fc-51a301a6a69b req-b0ecbecc-380f-403b-8cca-79a2fcd4b7c9 service nova] [instance: e8631ec4-1823-46d2-8553-05e3336fed32] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 785.761233] env[62096]: DEBUG nova.network.neutron [-] [instance: e8631ec4-1823-46d2-8553-05e3336fed32] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 785.761676] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 4cb29df9c4714b6abf579d39eaa9c8b5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 785.770281] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4cb29df9c4714b6abf579d39eaa9c8b5 [ 785.773701] env[62096]: DEBUG nova.network.neutron [req-7d6da94c-9f42-4e69-b0fc-51a301a6a69b req-b0ecbecc-380f-403b-8cca-79a2fcd4b7c9 service nova] [instance: e8631ec4-1823-46d2-8553-05e3336fed32] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 785.774199] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-7d6da94c-9f42-4e69-b0fc-51a301a6a69b req-b0ecbecc-380f-403b-8cca-79a2fcd4b7c9 service nova] Expecting reply to msg 780d10bcc3cd448e86bd02033dc9dfdb in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 785.777617] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance b54dd1f8-2e8d-446d-9145-d034664b7069 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 785.778118] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 7704f13d379449439c6df3c06c265af6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 785.781890] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 780d10bcc3cd448e86bd02033dc9dfdb [ 785.787016] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7704f13d379449439c6df3c06c265af6 [ 786.264236] env[62096]: INFO nova.compute.manager [-] [instance: e8631ec4-1823-46d2-8553-05e3336fed32] Took 1.02 seconds to deallocate network for instance. 
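The "Skipping heal of allocation" entries above each carry a per-instance allocation of {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}, and the resource tracker's final resource view a few entries further down reports used_ram=1472MB, used_disk=5GB and used_vcpus=5 against 512 MB of reserved host memory. The snippet below is a minimal, illustrative sketch in plain Python (not Nova's ResourceTracker code) of that arithmetic, assuming the five instances counted on this node each hold an allocation of the same shape (the 192 MB / 1 GB / 1 vCPU m1.nano flavor that appears later in this log).

# Illustrative sketch only: reproduces the accounting visible in this log,
# not Nova's actual ResourceTracker implementation.
allocations = [{'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}] * 5  # five tracked instances (assumption)
reserved_memory_mb = 512  # 'reserved' value reported for MEMORY_MB in the inventory data

used_ram_mb = reserved_memory_mb + sum(a['MEMORY_MB'] for a in allocations)
used_disk_gb = sum(a['DISK_GB'] for a in allocations)
used_vcpus = sum(a['VCPU'] for a in allocations)

print(used_ram_mb, used_disk_gb, used_vcpus)  # -> 1472 5 5, matching the final resource view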
[ 786.266942] env[62096]: DEBUG nova.compute.claims [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] [instance: e8631ec4-1823-46d2-8553-05e3336fed32] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 786.267131] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 786.275690] env[62096]: DEBUG oslo_concurrency.lockutils [req-7d6da94c-9f42-4e69-b0fc-51a301a6a69b req-b0ecbecc-380f-403b-8cca-79a2fcd4b7c9 service nova] Releasing lock "refresh_cache-e8631ec4-1823-46d2-8553-05e3336fed32" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 786.275908] env[62096]: DEBUG nova.compute.manager [req-7d6da94c-9f42-4e69-b0fc-51a301a6a69b req-b0ecbecc-380f-403b-8cca-79a2fcd4b7c9 service nova] [instance: e8631ec4-1823-46d2-8553-05e3336fed32] Received event network-vif-deleted-89fd1d75-07b4-4483-9ab1-b63973730b8e {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 786.279718] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance d5a6c627-bbab-49d4-a3bd-cb5b15264b18 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 786.280268] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 8489140182fd46e2827bf0dd41dba553 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 786.289714] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8489140182fd46e2827bf0dd41dba553 [ 786.782672] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 4057df30-37e2-415d-b6d2-e4211b95863d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 786.783271] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg bcee754f02594f7ca55843e39e607f80 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 786.793963] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bcee754f02594f7ca55843e39e607f80 [ 787.286569] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 012afef5-91e9-4fc1-af98-c17a3188ad45 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 787.287135] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg f50afc9616f340a6b9a35f8a4e05fe83 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 787.298509] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f50afc9616f340a6b9a35f8a4e05fe83 [ 787.790052] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 250ef7e7-266b-451d-8627-9cce211d4e83 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 787.790627] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 16647c2336fd421c8935603dcd5041a5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 787.801411] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 16647c2336fd421c8935603dcd5041a5 [ 788.293156] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance fcf84c67-fdea-41d7-aed9-690a45c97eaa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 788.293783] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 1696fd3ffe2840efb377c66afab0585e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 788.303735] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1696fd3ffe2840efb377c66afab0585e [ 788.796268] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 6cc2a2c1-613d-40bd-a375-424b84b66ac9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 788.796851] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 72895fe7ef424f9fbffabb2a1ca9eea0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 788.807250] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 72895fe7ef424f9fbffabb2a1ca9eea0 [ 789.299746] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance a7838568-9d47-4306-8bb6-9ad74ab1feb3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 789.300334] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg efb60d4e98964d02b44825650f5e0adc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 789.311995] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg efb60d4e98964d02b44825650f5e0adc [ 789.803064] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 5c04dded-d1c9-44fe-bf2f-de295d21b725 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 789.803660] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 680acf8a92044c9497961d27f799ba0f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 789.813831] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 680acf8a92044c9497961d27f799ba0f [ 790.306278] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance aa3e06b6-89f8-425f-8950-30deb025a473 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 790.306778] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 434089d7811b4086a23aaf46d7c65baa in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 790.316785] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 434089d7811b4086a23aaf46d7c65baa [ 790.809315] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 5627f913-29d2-476e-8fde-8ea457cc56f2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 790.809625] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=62096) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 790.809735] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=62096) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 791.164676] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d3a470c-de17-4140-84c1-25543aad48a9 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.172387] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33abf13b-7bca-4b87-9725-d0e5b7ad354a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.201364] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9a76595-d187-48f1-aac5-715a459b52b1 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.208112] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46a0c0e9-eaf0-4c9a-ac39-2410a627b748 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.220314] env[62096]: DEBUG nova.compute.provider_tree [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 791.220748] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 418d1db0b13d440aabdf145dc410cf4f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 791.227437] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 418d1db0b13d440aabdf145dc410cf4f [ 791.723194] env[62096]: DEBUG nova.scheduler.client.report [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 791.725655] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg b56c08b3ce804422bf83d59a8ccfde7f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 791.737585] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b56c08b3ce804422bf83d59a8ccfde7f [ 792.227859] 
env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62096) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 792.228234] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 11.034s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 792.228433] env[62096]: DEBUG oslo_concurrency.lockutils [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 26.499s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 792.230347] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Expecting reply to msg 0624d81178fd499298b9d9605ace5539 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 792.231466] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 792.231610] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Cleaning up deleted instances {{(pid=62096) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11226}} [ 792.232169] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 280fd97b624842e8b7b2288b962f14a6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 792.251782] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 280fd97b624842e8b7b2288b962f14a6 [ 792.266124] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0624d81178fd499298b9d9605ace5539 [ 792.740580] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] There are 2 instances to clean {{(pid=62096) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11235}} [ 792.740580] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] [instance: 06b9105a-8dcb-4bff-bba2-05e179036f24] Instance has had 0 of 5 cleanup attempts {{(pid=62096) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11239}} [ 792.740580] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 8f8a8bad596642e8b41b79418f6ca6eb in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 792.777669] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8f8a8bad596642e8b41b79418f6ca6eb [ 793.033751] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48aec8ec-0851-4738-bdb3-f0e25b3e717f {{(pid=62096) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.038677] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcf1143a-02d2-462e-937e-44c6e4b1ec4c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.068436] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b2b9f9a-6409-4907-ade9-1c919ae4a64c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.075179] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2836ea71-ef07-4190-b3a0-cd3d961bb3a9 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.087885] env[62096]: DEBUG nova.compute.provider_tree [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 793.088618] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Expecting reply to msg f2511670f8cf47ae88dd9c4e4f7dabb6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 793.096506] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f2511670f8cf47ae88dd9c4e4f7dabb6 [ 793.242749] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] [instance: 12a4b44d-b6aa-45e2-bba9-d73f41fa4b61] Instance has had 0 of 5 cleanup attempts {{(pid=62096) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11239}} [ 793.244048] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg aa05f3495b4b49218e33a7e8b550321b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 793.264361] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aa05f3495b4b49218e33a7e8b550321b [ 793.595327] env[62096]: DEBUG nova.scheduler.client.report [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 793.595327] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Expecting reply to msg f7354a2ae31a47929f4f4d15b0e8ba85 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 793.605356] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for 
msg f7354a2ae31a47929f4f4d15b0e8ba85 [ 793.748942] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 793.748942] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Cleaning up deleted instances with incomplete migration {{(pid=62096) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11264}} [ 793.748942] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 2b883b5473f04893a78d22fa8fa4d7b1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 793.758397] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2b883b5473f04893a78d22fa8fa4d7b1 [ 794.104098] env[62096]: DEBUG oslo_concurrency.lockutils [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.869s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 794.104098] env[62096]: ERROR nova.compute.manager [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 8f78fa6f-374c-4405-b1b5-d31f92a1df08, please check neutron logs for more information. 
[ 794.104098] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] Traceback (most recent call last): [ 794.104098] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 794.104098] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] self.driver.spawn(context, instance, image_meta, [ 794.104098] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 794.104098] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] self._vmops.spawn(context, instance, image_meta, injected_files, [ 794.104098] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 794.104098] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] vm_ref = self.build_virtual_machine(instance, [ 794.104405] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 794.104405] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] vif_infos = vmwarevif.get_vif_info(self._session, [ 794.104405] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 794.104405] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] for vif in network_info: [ 794.104405] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 794.104405] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] return self._sync_wrapper(fn, *args, **kwargs) [ 794.104405] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 794.104405] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] self.wait() [ 794.104405] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 794.104405] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] self[:] = self._gt.wait() [ 794.104405] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 794.104405] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] return self._exit_event.wait() [ 794.104405] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 794.104687] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] result = hub.switch() [ 794.104687] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
794.104687] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] return self.greenlet.switch() [ 794.104687] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 794.104687] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] result = function(*args, **kwargs) [ 794.104687] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 794.104687] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] return func(*args, **kwargs) [ 794.104687] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 794.104687] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] raise e [ 794.104687] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 794.104687] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] nwinfo = self.network_api.allocate_for_instance( [ 794.104687] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 794.104687] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] created_port_ids = self._update_ports_for_instance( [ 794.104977] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 794.104977] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] with excutils.save_and_reraise_exception(): [ 794.104977] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 794.104977] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] self.force_reraise() [ 794.104977] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 794.104977] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] raise self.value [ 794.104977] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 794.104977] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] updated_port = self._update_port( [ 794.104977] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 794.104977] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] _ensure_no_port_binding_failure(port) [ 794.104977] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 794.104977] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] raise exception.PortBindingFailed(port_id=port['id']) [ 794.105290] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] nova.exception.PortBindingFailed: Binding failed for port 8f78fa6f-374c-4405-b1b5-d31f92a1df08, please check neutron logs for more information. [ 794.105290] env[62096]: ERROR nova.compute.manager [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] [ 794.105290] env[62096]: DEBUG nova.compute.utils [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] Binding failed for port 8f78fa6f-374c-4405-b1b5-d31f92a1df08, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 794.105290] env[62096]: DEBUG oslo_concurrency.lockutils [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.849s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 794.105290] env[62096]: INFO nova.compute.claims [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f050f0de-dc84-4825-b490-eafe522354cd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 794.105290] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Expecting reply to msg 264a9b3cdb8f4ccba405705fce2d2f6d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 794.106866] env[62096]: DEBUG nova.compute.manager [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] Build of instance 510189aa-204c-4fd6-90d5-47a7ce5f7630 was re-scheduled: Binding failed for port 8f78fa6f-374c-4405-b1b5-d31f92a1df08, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 794.107597] env[62096]: DEBUG nova.compute.manager [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 794.107967] env[62096]: DEBUG oslo_concurrency.lockutils [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Acquiring lock "refresh_cache-510189aa-204c-4fd6-90d5-47a7ce5f7630" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 794.108266] env[62096]: DEBUG oslo_concurrency.lockutils [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Acquired lock "refresh_cache-510189aa-204c-4fd6-90d5-47a7ce5f7630" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 794.108552] env[62096]: DEBUG nova.network.neutron [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 794.109034] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Expecting reply to msg 03ec5d49f31b4e69bee4bf4efcef02aa in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 794.115660] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 03ec5d49f31b4e69bee4bf4efcef02aa [ 794.137343] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 264a9b3cdb8f4ccba405705fce2d2f6d [ 794.251607] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 794.251607] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 1475ec6f274b492396b5af0a0eaba2aa in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 794.263320] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1475ec6f274b492396b5af0a0eaba2aa [ 794.609150] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Expecting reply to msg 6bfcd52ba60f4c6f9444225d678bfae1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 794.616902] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6bfcd52ba60f4c6f9444225d678bfae1 [ 794.624690] env[62096]: DEBUG nova.network.neutron [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 
510189aa-204c-4fd6-90d5-47a7ce5f7630] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 794.725948] env[62096]: DEBUG nova.network.neutron [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 794.726471] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Expecting reply to msg 79d707b370b64424873b8a9ccbbfebec in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 794.734722] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 79d707b370b64424873b8a9ccbbfebec [ 795.229133] env[62096]: DEBUG oslo_concurrency.lockutils [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Releasing lock "refresh_cache-510189aa-204c-4fd6-90d5-47a7ce5f7630" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 795.229378] env[62096]: DEBUG nova.compute.manager [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 795.229520] env[62096]: DEBUG nova.compute.manager [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 795.229686] env[62096]: DEBUG nova.network.neutron [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 795.249046] env[62096]: DEBUG nova.network.neutron [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 795.249619] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Expecting reply to msg e35b201862944daf81f02f2c8fb200d1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 795.256931] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e35b201862944daf81f02f2c8fb200d1 [ 795.369349] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f811dd0b-eca2-4d57-8dfb-4621d51141b1 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.377122] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-806aebe6-6e1c-4fc1-a8a1-b9d95af246c4 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.408613] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d9622db-075f-4354-824a-82b9980f0896 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.415881] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36fd6975-d445-49fe-8cd0-3500c467f8e7 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.428540] env[62096]: DEBUG nova.compute.provider_tree [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 795.429036] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Expecting reply to msg 27ec268279bc4d3c82a939e2c02812f6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 795.436060] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 27ec268279bc4d3c82a939e2c02812f6 [ 795.751536] env[62096]: DEBUG nova.network.neutron [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.752090] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Expecting reply to msg b94648be47fb4310b3829c4f1acb3e77 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 795.761686] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b94648be47fb4310b3829c4f1acb3e77 [ 795.931990] env[62096]: DEBUG nova.scheduler.client.report [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Inventory has 
not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 795.934423] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Expecting reply to msg 5404243229ab4f16affbdd168282fb55 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 795.945215] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5404243229ab4f16affbdd168282fb55 [ 796.254057] env[62096]: INFO nova.compute.manager [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] [instance: 510189aa-204c-4fd6-90d5-47a7ce5f7630] Took 1.02 seconds to deallocate network for instance. [ 796.255743] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Expecting reply to msg 235b88a356b0480ea52f65f3b0b46d68 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 796.290502] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 235b88a356b0480ea52f65f3b0b46d68 [ 796.436796] env[62096]: DEBUG oslo_concurrency.lockutils [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.337s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 796.437392] env[62096]: DEBUG nova.compute.manager [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f050f0de-dc84-4825-b490-eafe522354cd] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 796.439107] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Expecting reply to msg ca82eab12344484ea7997a33fcf74681 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 796.440180] env[62096]: DEBUG oslo_concurrency.lockutils [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.290s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 796.441670] env[62096]: INFO nova.compute.claims [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 796.443691] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Expecting reply to msg c34777516932476484eea2f7dcc72ab8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 796.469451] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ca82eab12344484ea7997a33fcf74681 [ 796.475476] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c34777516932476484eea2f7dcc72ab8 [ 796.759721] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Expecting reply to msg ddfe41a7f0f446d5a4d4f43ebced99d0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 796.791218] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ddfe41a7f0f446d5a4d4f43ebced99d0 [ 796.946224] env[62096]: DEBUG nova.compute.utils [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 796.946855] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Expecting reply to msg 991e42b8b8d649b69b479e279b67ab8b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 796.948828] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Expecting reply to msg 49b8cef1ef754d73bfff0d0a36e9ae72 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 796.949798] env[62096]: DEBUG nova.compute.manager [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f050f0de-dc84-4825-b490-eafe522354cd] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 796.949884] env[62096]: DEBUG nova.network.neutron [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f050f0de-dc84-4825-b490-eafe522354cd] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 796.957262] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 49b8cef1ef754d73bfff0d0a36e9ae72 [ 796.959348] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 991e42b8b8d649b69b479e279b67ab8b [ 797.011275] env[62096]: DEBUG nova.policy [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'baa97847d396418f89287f224b73c464', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '98ac4c25deee45e8a655575d81f18bca', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 797.283133] env[62096]: INFO nova.scheduler.client.report [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Deleted allocations for instance 510189aa-204c-4fd6-90d5-47a7ce5f7630 [ 797.289174] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Expecting reply to msg 693b7bfbc1c94b9390a6daa7f307b73c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 797.307844] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 693b7bfbc1c94b9390a6daa7f307b73c [ 797.343684] env[62096]: DEBUG nova.network.neutron [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f050f0de-dc84-4825-b490-eafe522354cd] Successfully created port: fc98466e-f50e-479f-8ee2-97a380fef60d {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 797.450472] env[62096]: DEBUG nova.compute.manager [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f050f0de-dc84-4825-b490-eafe522354cd] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 797.452309] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Expecting reply to msg 8371931e78504fb6ae2e964960f1a90a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 797.486241] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8371931e78504fb6ae2e964960f1a90a [ 797.704504] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5975084f-e911-42ff-80f8-4c5168d4a9ca {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.712744] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d524bda5-7f6d-4a27-aad5-62813057c163 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.742438] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f5fbfdb-7875-4107-a255-663f719cb9e1 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.749863] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aea9602-fa7e-4bf5-9227-baacd0d3c3f7 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.764779] env[62096]: DEBUG nova.compute.provider_tree [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 797.765299] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Expecting reply to msg f048cd52b15945f49678d284a6115947 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 797.772104] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f048cd52b15945f49678d284a6115947 [ 797.800080] env[62096]: DEBUG oslo_concurrency.lockutils [None req-8017d6f3-a8b6-4835-9973-b3f86aaa9630 tempest-VolumesAdminNegativeTest-2010027559 tempest-VolumesAdminNegativeTest-2010027559-project-member] Lock "510189aa-204c-4fd6-90d5-47a7ce5f7630" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 170.572s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 797.800080] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Expecting reply to msg cfb5d943ae6241f9aac7388b32723306 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 797.809360] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cfb5d943ae6241f9aac7388b32723306 [ 797.961202] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404
tempest-AttachVolumeShelveTestJSON-637009404-project-member] Expecting reply to msg af29df5aeaaf46e2aff8dd6ad60599f2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 798.006364] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg af29df5aeaaf46e2aff8dd6ad60599f2 [ 798.273102] env[62096]: DEBUG nova.scheduler.client.report [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 798.273102] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Expecting reply to msg 0733acb231d0466b88b2f8159cee7082 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 798.284673] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0733acb231d0466b88b2f8159cee7082 [ 798.303713] env[62096]: DEBUG nova.compute.manager [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 798.303713] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Expecting reply to msg b51a54bcbf454a4f97a89d4457211f9a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 798.338861] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b51a54bcbf454a4f97a89d4457211f9a [ 798.465522] env[62096]: DEBUG nova.compute.manager [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f050f0de-dc84-4825-b490-eafe522354cd] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 798.494251] env[62096]: DEBUG nova.virt.hardware [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 798.494639] env[62096]: DEBUG nova.virt.hardware [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 798.494964] env[62096]: DEBUG nova.virt.hardware [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 798.495256] env[62096]: DEBUG nova.virt.hardware [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 798.495500] env[62096]: DEBUG nova.virt.hardware [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 798.495688] env[62096]: DEBUG nova.virt.hardware [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 798.495987] env[62096]: DEBUG nova.virt.hardware [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 798.496235] env[62096]: DEBUG nova.virt.hardware [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
798.496492] env[62096]: DEBUG nova.virt.hardware [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 798.496719] env[62096]: DEBUG nova.virt.hardware [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 798.496955] env[62096]: DEBUG nova.virt.hardware [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 798.497871] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0a60335-606a-4869-855e-eb52ac6cf0ec {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.506462] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41531c53-deab-4ab8-a890-c778f4ffd1ae {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.512737] env[62096]: DEBUG nova.compute.manager [req-38136cee-0b36-4a6f-86d3-4f6e79043cff req-81406904-5e55-4088-9c92-34c2246e6845 service nova] [instance: f050f0de-dc84-4825-b490-eafe522354cd] Received event network-changed-fc98466e-f50e-479f-8ee2-97a380fef60d {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 798.513488] env[62096]: DEBUG nova.compute.manager [req-38136cee-0b36-4a6f-86d3-4f6e79043cff req-81406904-5e55-4088-9c92-34c2246e6845 service nova] [instance: f050f0de-dc84-4825-b490-eafe522354cd] Refreshing instance network info cache due to event network-changed-fc98466e-f50e-479f-8ee2-97a380fef60d. 
{{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 798.513791] env[62096]: DEBUG oslo_concurrency.lockutils [req-38136cee-0b36-4a6f-86d3-4f6e79043cff req-81406904-5e55-4088-9c92-34c2246e6845 service nova] Acquiring lock "refresh_cache-f050f0de-dc84-4825-b490-eafe522354cd" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 798.514044] env[62096]: DEBUG oslo_concurrency.lockutils [req-38136cee-0b36-4a6f-86d3-4f6e79043cff req-81406904-5e55-4088-9c92-34c2246e6845 service nova] Acquired lock "refresh_cache-f050f0de-dc84-4825-b490-eafe522354cd" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 798.514263] env[62096]: DEBUG nova.network.neutron [req-38136cee-0b36-4a6f-86d3-4f6e79043cff req-81406904-5e55-4088-9c92-34c2246e6845 service nova] [instance: f050f0de-dc84-4825-b490-eafe522354cd] Refreshing network info cache for port fc98466e-f50e-479f-8ee2-97a380fef60d {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 798.514729] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-38136cee-0b36-4a6f-86d3-4f6e79043cff req-81406904-5e55-4088-9c92-34c2246e6845 service nova] Expecting reply to msg 1e0e6b653c7d482a851a947fa130507d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 798.523452] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1e0e6b653c7d482a851a947fa130507d [ 798.650197] env[62096]: ERROR nova.compute.manager [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port fc98466e-f50e-479f-8ee2-97a380fef60d, please check neutron logs for more information. 
[ 798.650197] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 798.650197] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 798.650197] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 798.650197] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 798.650197] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 798.650197] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 798.650197] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 798.650197] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 798.650197] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 798.650197] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 798.650197] env[62096]: ERROR nova.compute.manager raise self.value [ 798.650197] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 798.650197] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 798.650197] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 798.650197] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 798.650595] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 798.650595] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 798.650595] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port fc98466e-f50e-479f-8ee2-97a380fef60d, please check neutron logs for more information. 
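The PortBindingFailed tracebacks logged around this point (for port fc98466e-f50e-479f-8ee2-97a380fef60d here, and again for port 956e7432-5428-47af-a76b-6187f602f8ba later in this section) all bottom out in nova/network/neutron.py line 294, _ensure_no_port_binding_failure, which raises once Neutron reports the port binding as failed. Below is a minimal, self-contained sketch of that check, assuming the usual Neutron port attribute 'binding:vif_type'; the constant and exception class are stand-ins for nova.network.model.VIF_TYPE_BINDING_FAILED and nova.exception.PortBindingFailed, so this is an illustration of the check in the frames above, not Nova's verbatim code.

    # Illustration only: simplified version of the check seen at the bottom of the
    # tracebacks above. Names below are stand-ins for the real Nova symbols.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__("Binding failed for port %s, please check neutron "
                             "logs for more information." % port_id)

    def ensure_no_port_binding_failure(port):
        # Neutron records the failure on the port itself via 'binding:vif_type';
        # Nova converts that into the PortBindingFailed exception seen in this log.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    try:
        ensure_no_port_binding_failure(
            {'id': 'fc98466e-f50e-479f-8ee2-97a380fef60d',
             'binding:vif_type': VIF_TYPE_BINDING_FAILED})
    except PortBindingFailed as exc:
        print(exc)  # same message as the ERROR entries above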
[ 798.650595] env[62096]: ERROR nova.compute.manager [ 798.650595] env[62096]: Traceback (most recent call last): [ 798.650595] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 798.650595] env[62096]: listener.cb(fileno) [ 798.650595] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 798.650595] env[62096]: result = function(*args, **kwargs) [ 798.650595] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 798.650595] env[62096]: return func(*args, **kwargs) [ 798.650595] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 798.650595] env[62096]: raise e [ 798.650595] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 798.650595] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 798.650595] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 798.650595] env[62096]: created_port_ids = self._update_ports_for_instance( [ 798.650595] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 798.650595] env[62096]: with excutils.save_and_reraise_exception(): [ 798.650595] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 798.650595] env[62096]: self.force_reraise() [ 798.650595] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 798.650595] env[62096]: raise self.value [ 798.650595] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 798.650595] env[62096]: updated_port = self._update_port( [ 798.650595] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 798.650595] env[62096]: _ensure_no_port_binding_failure(port) [ 798.650595] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 798.650595] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 798.651244] env[62096]: nova.exception.PortBindingFailed: Binding failed for port fc98466e-f50e-479f-8ee2-97a380fef60d, please check neutron logs for more information. [ 798.651244] env[62096]: Removing descriptor: 16 [ 798.651244] env[62096]: ERROR nova.compute.manager [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f050f0de-dc84-4825-b490-eafe522354cd] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port fc98466e-f50e-479f-8ee2-97a380fef60d, please check neutron logs for more information. 
[ 798.651244] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] Traceback (most recent call last): [ 798.651244] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 798.651244] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] yield resources [ 798.651244] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 798.651244] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] self.driver.spawn(context, instance, image_meta, [ 798.651244] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 798.651244] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 798.651244] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 798.651244] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] vm_ref = self.build_virtual_machine(instance, [ 798.651528] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 798.651528] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] vif_infos = vmwarevif.get_vif_info(self._session, [ 798.651528] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 798.651528] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] for vif in network_info: [ 798.651528] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 798.651528] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] return self._sync_wrapper(fn, *args, **kwargs) [ 798.651528] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 798.651528] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] self.wait() [ 798.651528] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 798.651528] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] self[:] = self._gt.wait() [ 798.651528] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 798.651528] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] return self._exit_event.wait() [ 798.651528] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 798.651897] env[62096]: ERROR 
nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] result = hub.switch() [ 798.651897] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 798.651897] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] return self.greenlet.switch() [ 798.651897] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 798.651897] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] result = function(*args, **kwargs) [ 798.651897] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 798.651897] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] return func(*args, **kwargs) [ 798.651897] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 798.651897] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] raise e [ 798.651897] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 798.651897] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] nwinfo = self.network_api.allocate_for_instance( [ 798.651897] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 798.651897] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] created_port_ids = self._update_ports_for_instance( [ 798.652214] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 798.652214] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] with excutils.save_and_reraise_exception(): [ 798.652214] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 798.652214] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] self.force_reraise() [ 798.652214] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 798.652214] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] raise self.value [ 798.652214] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 798.652214] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] updated_port = self._update_port( [ 798.652214] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 798.652214] 
env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] _ensure_no_port_binding_failure(port) [ 798.652214] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 798.652214] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] raise exception.PortBindingFailed(port_id=port['id']) [ 798.652484] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] nova.exception.PortBindingFailed: Binding failed for port fc98466e-f50e-479f-8ee2-97a380fef60d, please check neutron logs for more information. [ 798.652484] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] [ 798.652484] env[62096]: INFO nova.compute.manager [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f050f0de-dc84-4825-b490-eafe522354cd] Terminating instance [ 798.653905] env[62096]: DEBUG oslo_concurrency.lockutils [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Acquiring lock "refresh_cache-f050f0de-dc84-4825-b490-eafe522354cd" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 798.781412] env[62096]: DEBUG oslo_concurrency.lockutils [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.336s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 798.781412] env[62096]: DEBUG nova.compute.manager [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 798.781412] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Expecting reply to msg b736213e2baf487d96290aea4b64742e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 798.781412] env[62096]: DEBUG oslo_concurrency.lockutils [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.631s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 798.781412] env[62096]: INFO nova.compute.claims [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 798.783416] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Expecting reply to msg 09b77a3a86294ea29d993a2ca0cad34b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 798.816675] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 09b77a3a86294ea29d993a2ca0cad34b [ 798.817235] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b736213e2baf487d96290aea4b64742e [ 798.824388] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 799.046714] env[62096]: DEBUG nova.network.neutron [req-38136cee-0b36-4a6f-86d3-4f6e79043cff req-81406904-5e55-4088-9c92-34c2246e6845 service nova] [instance: f050f0de-dc84-4825-b490-eafe522354cd] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 799.171632] env[62096]: DEBUG nova.network.neutron [req-38136cee-0b36-4a6f-86d3-4f6e79043cff req-81406904-5e55-4088-9c92-34c2246e6845 service nova] [instance: f050f0de-dc84-4825-b490-eafe522354cd] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 799.173063] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-38136cee-0b36-4a6f-86d3-4f6e79043cff req-81406904-5e55-4088-9c92-34c2246e6845 service nova] Expecting reply to msg 06c986a97c16440f898d038763916b2d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 799.183378] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 06c986a97c16440f898d038763916b2d [ 799.286518] env[62096]: DEBUG nova.compute.utils [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 799.287085] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Expecting reply to msg 48368ab7d9ea408f87e6fa567cb8b070 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 799.292732] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Expecting reply to msg 7dea9076730c46eaa79855eab2075b08 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 799.292732] env[62096]: DEBUG nova.compute.manager [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 799.292732] env[62096]: DEBUG nova.network.neutron [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 799.297683] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 48368ab7d9ea408f87e6fa567cb8b070 [ 799.300574] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7dea9076730c46eaa79855eab2075b08 [ 799.357723] env[62096]: DEBUG nova.policy [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '224d54f476614dfb873e768d577afad6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '73069c3d34654084b7122d5d642d38a5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 799.675665] env[62096]: DEBUG oslo_concurrency.lockutils [req-38136cee-0b36-4a6f-86d3-4f6e79043cff req-81406904-5e55-4088-9c92-34c2246e6845 service nova] Releasing lock "refresh_cache-f050f0de-dc84-4825-b490-eafe522354cd" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 799.676118] env[62096]: DEBUG oslo_concurrency.lockutils [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Acquired lock "refresh_cache-f050f0de-dc84-4825-b490-eafe522354cd" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.676334] env[62096]: DEBUG nova.network.neutron [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f050f0de-dc84-4825-b490-eafe522354cd] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 799.676761] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Expecting reply to msg 9c5116eb47534f1b9eed5e90d79d8c53 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 799.683652] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9c5116eb47534f1b9eed5e90d79d8c53 [ 799.791066] env[62096]: DEBUG nova.compute.manager [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 799.793115] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Expecting reply to msg 756c5c6f08404b7e9d12b8c1d5f5a492 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 799.831068] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 756c5c6f08404b7e9d12b8c1d5f5a492 [ 800.015480] env[62096]: DEBUG nova.network.neutron [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] Successfully created port: 956e7432-5428-47af-a76b-6187f602f8ba {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 800.108108] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0c75fc2-c5a2-4f70-8e60-910c2b3eb31b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.114773] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6aada4f-3ea7-4272-9aba-79ada05b2f1b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.154518] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbc863b3-2ae7-471f-859b-afaabc9ab26d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.161979] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-912bec28-1e33-4b2c-a7cf-0eb963da20d4 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.175635] env[62096]: DEBUG nova.compute.provider_tree [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 800.176337] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Expecting reply to msg a4f035b0a9e347b98819e5bd69b530e9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 800.183505] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a4f035b0a9e347b98819e5bd69b530e9 [ 800.209480] env[62096]: DEBUG nova.network.neutron [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f050f0de-dc84-4825-b490-eafe522354cd] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 800.306898] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Expecting reply to msg 38cd500e29c04bd28aaba5af5fd547fb in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 800.340812] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 38cd500e29c04bd28aaba5af5fd547fb [ 800.343467] env[62096]: DEBUG nova.network.neutron [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f050f0de-dc84-4825-b490-eafe522354cd] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 800.344176] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Expecting reply to msg 5b6d1c24368b494b805c1e3d4eccfb4c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 800.352627] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5b6d1c24368b494b805c1e3d4eccfb4c [ 800.566272] env[62096]: DEBUG nova.compute.manager [req-8f4984b6-756b-41cd-b890-21e2d2a006d3 req-67c40c09-64e9-494c-b326-ffa78d630d5b service nova] [instance: f050f0de-dc84-4825-b490-eafe522354cd] Received event network-vif-deleted-fc98466e-f50e-479f-8ee2-97a380fef60d {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 800.679390] env[62096]: DEBUG nova.scheduler.client.report [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 800.681903] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Expecting reply to msg 8f14de7988c14534a00890e3acaf9f32 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 800.700371] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8f14de7988c14534a00890e3acaf9f32 [ 800.810021] env[62096]: DEBUG nova.compute.manager [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 800.843170] env[62096]: DEBUG nova.virt.hardware [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 800.844125] env[62096]: DEBUG nova.virt.hardware [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 800.844376] env[62096]: DEBUG nova.virt.hardware [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 800.844909] env[62096]: DEBUG nova.virt.hardware [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 800.845133] env[62096]: DEBUG nova.virt.hardware [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 800.845342] env[62096]: DEBUG nova.virt.hardware [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 800.845843] env[62096]: DEBUG nova.virt.hardware [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 800.846084] env[62096]: DEBUG nova.virt.hardware [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 800.846779] env[62096]: DEBUG nova.virt.hardware [None 
req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 800.847016] env[62096]: DEBUG nova.virt.hardware [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 800.847250] env[62096]: DEBUG nova.virt.hardware [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 800.847917] env[62096]: DEBUG oslo_concurrency.lockutils [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Releasing lock "refresh_cache-f050f0de-dc84-4825-b490-eafe522354cd" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 800.848499] env[62096]: DEBUG nova.compute.manager [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f050f0de-dc84-4825-b490-eafe522354cd] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 800.848752] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f050f0de-dc84-4825-b490-eafe522354cd] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 800.849816] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5afd16f-9fdb-446a-b356-c9432cf6e2ad {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.853237] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5ea3367d-de50-4914-9ecb-f3d1e65d81f4 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.861603] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06acc3a2-1f46-4e50-b900-0e5a885d1437 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.867824] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cfef592-1d09-462f-bdf9-0f68c7d531e5 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.894786] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f050f0de-dc84-4825-b490-eafe522354cd] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f050f0de-dc84-4825-b490-eafe522354cd could not be found. 
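The nova.virt.hardware DEBUG run above (repeated for instance f050f0de-dc84-4825-b490-eafe522354cd at 798.49x and for 3283ae52-1a1f-4dec-91f3-44cc42361bb5 at 800.84x) traces _get_desirable_cpu_topologies: with flavor and image limits of 0:0:0 the maxima default to 65536 sockets/cores/threads, every split of the flavor's vcpus=1 is enumerated, and the single candidate (1,1,1) is kept. The sketch below is a rough, self-contained approximation of that enumeration, assuming only that a candidate topology is any sockets*cores*threads factorisation of the vCPU count within the maxima; it is not Nova's exact algorithm and ignores NUMA and preference ordering.

    from collections import namedtuple

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """List every sockets*cores*threads split that exactly covers `vcpus`."""
        found = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            per_socket = vcpus // sockets
            for cores in range(1, min(per_socket, max_cores) + 1):
                if per_socket % cores:
                    continue
                threads = per_socket // cores
                if threads <= max_threads:
                    found.append(VirtCPUTopology(sockets, cores, threads))
        return found

    print(possible_topologies(1))
    # [VirtCPUTopology(sockets=1, cores=1, threads=1)] -- the one topology the log reports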
[ 800.894786] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f050f0de-dc84-4825-b490-eafe522354cd] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 800.895024] env[62096]: INFO nova.compute.manager [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f050f0de-dc84-4825-b490-eafe522354cd] Took 0.05 seconds to destroy the instance on the hypervisor. [ 800.895321] env[62096]: DEBUG oslo.service.loopingcall [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 800.895639] env[62096]: DEBUG nova.compute.manager [-] [instance: f050f0de-dc84-4825-b490-eafe522354cd] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 800.895683] env[62096]: DEBUG nova.network.neutron [-] [instance: f050f0de-dc84-4825-b490-eafe522354cd] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 800.925382] env[62096]: DEBUG nova.network.neutron [-] [instance: f050f0de-dc84-4825-b490-eafe522354cd] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 800.925987] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 30ea606ea1ca41a6bde092d328296f49 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 800.934168] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 30ea606ea1ca41a6bde092d328296f49 [ 801.185664] env[62096]: DEBUG oslo_concurrency.lockutils [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.406s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 801.186221] env[62096]: DEBUG nova.compute.manager [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 801.188508] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Expecting reply to msg 9433a69cba33449fa201bbd715914fc0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 801.193942] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 26.399s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 801.193942] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 0d136436a1424a15ba8d4b4c783de73a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 801.221298] env[62096]: ERROR nova.compute.manager [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 956e7432-5428-47af-a76b-6187f602f8ba, please check neutron logs for more information. [ 801.221298] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 801.221298] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 801.221298] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 801.221298] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 801.221298] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 801.221298] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 801.221298] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 801.221298] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 801.221298] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 801.221298] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 801.221298] env[62096]: ERROR nova.compute.manager raise self.value [ 801.221298] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 801.221298] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 801.221298] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 801.221298] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 801.221775] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 801.221775] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 801.221775] env[62096]: ERROR 
nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 956e7432-5428-47af-a76b-6187f602f8ba, please check neutron logs for more information. [ 801.221775] env[62096]: ERROR nova.compute.manager [ 801.221775] env[62096]: Traceback (most recent call last): [ 801.221775] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 801.221775] env[62096]: listener.cb(fileno) [ 801.221775] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 801.221775] env[62096]: result = function(*args, **kwargs) [ 801.221775] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 801.221775] env[62096]: return func(*args, **kwargs) [ 801.221775] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 801.221775] env[62096]: raise e [ 801.221775] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 801.221775] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 801.221775] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 801.221775] env[62096]: created_port_ids = self._update_ports_for_instance( [ 801.221775] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 801.221775] env[62096]: with excutils.save_and_reraise_exception(): [ 801.221775] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 801.221775] env[62096]: self.force_reraise() [ 801.221775] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 801.221775] env[62096]: raise self.value [ 801.221775] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 801.221775] env[62096]: updated_port = self._update_port( [ 801.221775] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 801.221775] env[62096]: _ensure_no_port_binding_failure(port) [ 801.221775] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 801.221775] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 801.222447] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 956e7432-5428-47af-a76b-6187f602f8ba, please check neutron logs for more information. [ 801.222447] env[62096]: Removing descriptor: 16 [ 801.222447] env[62096]: ERROR nova.compute.manager [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 956e7432-5428-47af-a76b-6187f602f8ba, please check neutron logs for more information. 
[ 801.222447] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] Traceback (most recent call last): [ 801.222447] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 801.222447] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] yield resources [ 801.222447] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 801.222447] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] self.driver.spawn(context, instance, image_meta, [ 801.222447] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 801.222447] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 801.222447] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 801.222447] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] vm_ref = self.build_virtual_machine(instance, [ 801.222746] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 801.222746] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] vif_infos = vmwarevif.get_vif_info(self._session, [ 801.222746] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 801.222746] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] for vif in network_info: [ 801.222746] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 801.222746] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] return self._sync_wrapper(fn, *args, **kwargs) [ 801.222746] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 801.222746] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] self.wait() [ 801.222746] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 801.222746] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] self[:] = self._gt.wait() [ 801.222746] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 801.222746] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] return self._exit_event.wait() [ 801.222746] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 801.223068] env[62096]: ERROR 
nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] result = hub.switch() [ 801.223068] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 801.223068] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] return self.greenlet.switch() [ 801.223068] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 801.223068] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] result = function(*args, **kwargs) [ 801.223068] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 801.223068] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] return func(*args, **kwargs) [ 801.223068] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 801.223068] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] raise e [ 801.223068] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 801.223068] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] nwinfo = self.network_api.allocate_for_instance( [ 801.223068] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 801.223068] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] created_port_ids = self._update_ports_for_instance( [ 801.223380] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 801.223380] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] with excutils.save_and_reraise_exception(): [ 801.223380] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 801.223380] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] self.force_reraise() [ 801.223380] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 801.223380] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] raise self.value [ 801.223380] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 801.223380] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] updated_port = self._update_port( [ 801.223380] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 801.223380] 
env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] _ensure_no_port_binding_failure(port) [ 801.223380] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 801.223380] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] raise exception.PortBindingFailed(port_id=port['id']) [ 801.223660] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] nova.exception.PortBindingFailed: Binding failed for port 956e7432-5428-47af-a76b-6187f602f8ba, please check neutron logs for more information. [ 801.223660] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] [ 801.223660] env[62096]: INFO nova.compute.manager [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] Terminating instance [ 801.224562] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9433a69cba33449fa201bbd715914fc0 [ 801.225145] env[62096]: DEBUG oslo_concurrency.lockutils [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Acquiring lock "refresh_cache-3283ae52-1a1f-4dec-91f3-44cc42361bb5" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 801.225299] env[62096]: DEBUG oslo_concurrency.lockutils [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Acquired lock "refresh_cache-3283ae52-1a1f-4dec-91f3-44cc42361bb5" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.225457] env[62096]: DEBUG nova.network.neutron [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 801.225854] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Expecting reply to msg 4bc1e1d182ba4f09b3fa09be8f7891bf in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 801.226933] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0d136436a1424a15ba8d4b4c783de73a [ 801.230921] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4bc1e1d182ba4f09b3fa09be8f7891bf [ 801.429214] env[62096]: DEBUG nova.network.neutron [-] [instance: f050f0de-dc84-4825-b490-eafe522354cd] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 801.429693] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 8f59a1af387c4b5b9a0714a106479b30 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 801.437840] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8f59a1af387c4b5b9a0714a106479b30 [ 801.694574] env[62096]: DEBUG nova.compute.utils [None 
req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 801.695145] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Expecting reply to msg 7c67eeabf1834ae6af2a4493104cbe62 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 801.699083] env[62096]: DEBUG nova.compute.manager [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 801.699230] env[62096]: DEBUG nova.network.neutron [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 801.704428] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c67eeabf1834ae6af2a4493104cbe62 [ 801.744491] env[62096]: DEBUG nova.policy [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '39175a0020934942b1944bd856126b31', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e07ae35535744e169ff8c85da1a5fde3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 801.795399] env[62096]: DEBUG nova.network.neutron [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 801.932628] env[62096]: INFO nova.compute.manager [-] [instance: f050f0de-dc84-4825-b490-eafe522354cd] Took 1.04 seconds to deallocate network for instance. 
[ 801.934894] env[62096]: DEBUG nova.compute.claims [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f050f0de-dc84-4825-b490-eafe522354cd] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 801.934894] env[62096]: DEBUG oslo_concurrency.lockutils [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 801.994417] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2022ec9e-d3a5-47ab-9ff0-276db636b7cf {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.002684] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f029d4c-5fb2-465e-9957-84e043181ff4 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.033721] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33e52802-b637-41f1-84b2-e277db895eda {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.041284] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-211030c3-dfa0-492d-9569-1bd4ef2ae3b7 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.057379] env[62096]: DEBUG nova.compute.provider_tree [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 802.057379] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 973047df1d664fccaa6f6b3cfe9b3eab in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 802.068176] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 973047df1d664fccaa6f6b3cfe9b3eab [ 802.200390] env[62096]: DEBUG nova.compute.manager [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 802.202213] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Expecting reply to msg 0f1d68ee495b4d23b2b7f1256622b333 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 802.251981] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0f1d68ee495b4d23b2b7f1256622b333 [ 802.263380] env[62096]: DEBUG nova.network.neutron [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.263885] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Expecting reply to msg e15609393f5b4f75b6b54f1011e02149 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 802.272010] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e15609393f5b4f75b6b54f1011e02149 [ 802.356718] env[62096]: DEBUG nova.network.neutron [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] Successfully created port: 0be010ec-25ec-41be-b4fc-96b43c921160 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 802.560487] env[62096]: DEBUG nova.scheduler.client.report [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 802.562847] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 30ea300bed554241b675889236ae7b54 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 802.577791] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 30ea300bed554241b675889236ae7b54 [ 802.634933] env[62096]: DEBUG nova.compute.manager [req-b6b858ad-34b8-43db-ac90-40e50a8be534 req-9130d74c-2e4b-4445-b806-028ba16f88f9 service nova] [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] Received event network-changed-956e7432-5428-47af-a76b-6187f602f8ba {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 802.635126] env[62096]: DEBUG nova.compute.manager [req-b6b858ad-34b8-43db-ac90-40e50a8be534 req-9130d74c-2e4b-4445-b806-028ba16f88f9 service nova] [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] Refreshing instance network 
info cache due to event network-changed-956e7432-5428-47af-a76b-6187f602f8ba. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 802.635361] env[62096]: DEBUG oslo_concurrency.lockutils [req-b6b858ad-34b8-43db-ac90-40e50a8be534 req-9130d74c-2e4b-4445-b806-028ba16f88f9 service nova] Acquiring lock "refresh_cache-3283ae52-1a1f-4dec-91f3-44cc42361bb5" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 802.711422] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Expecting reply to msg 36e84b4f6458409ea5011b64d6b778c5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 802.745781] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 36e84b4f6458409ea5011b64d6b778c5 [ 802.765602] env[62096]: DEBUG oslo_concurrency.lockutils [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Releasing lock "refresh_cache-3283ae52-1a1f-4dec-91f3-44cc42361bb5" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 802.766071] env[62096]: DEBUG nova.compute.manager [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 802.766267] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 802.766567] env[62096]: DEBUG oslo_concurrency.lockutils [req-b6b858ad-34b8-43db-ac90-40e50a8be534 req-9130d74c-2e4b-4445-b806-028ba16f88f9 service nova] Acquired lock "refresh_cache-3283ae52-1a1f-4dec-91f3-44cc42361bb5" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.766737] env[62096]: DEBUG nova.network.neutron [req-b6b858ad-34b8-43db-ac90-40e50a8be534 req-9130d74c-2e4b-4445-b806-028ba16f88f9 service nova] [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] Refreshing network info cache for port 956e7432-5428-47af-a76b-6187f602f8ba {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 802.767218] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-b6b858ad-34b8-43db-ac90-40e50a8be534 req-9130d74c-2e4b-4445-b806-028ba16f88f9 service nova] Expecting reply to msg 7e576f91376548cd8492f482d0d7a380 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 802.768551] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4c5fbd55-1cfb-49c7-a001-71908978fb5d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.773750] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7e576f91376548cd8492f482d0d7a380 [ 802.779743] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae9c492a-2464-4503-984b-b291131be44a 
{{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.800792] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3283ae52-1a1f-4dec-91f3-44cc42361bb5 could not be found. [ 802.801008] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 802.801185] env[62096]: INFO nova.compute.manager [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] Took 0.03 seconds to destroy the instance on the hypervisor. [ 802.801413] env[62096]: DEBUG oslo.service.loopingcall [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 802.801625] env[62096]: DEBUG nova.compute.manager [-] [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 802.801720] env[62096]: DEBUG nova.network.neutron [-] [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 802.817990] env[62096]: DEBUG nova.network.neutron [-] [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 802.818544] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 1999eb052fce4576b249e9c6fe92f8b8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 802.827386] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1999eb052fce4576b249e9c6fe92f8b8 [ 803.070380] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.881s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 803.071055] env[62096]: ERROR nova.compute.manager [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f4e2ec88-4a20-4948-93f1-f9ff4ba04d70, please check neutron logs for more information. 
[ 803.071055] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] Traceback (most recent call last): [ 803.071055] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 803.071055] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] self.driver.spawn(context, instance, image_meta, [ 803.071055] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 803.071055] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 803.071055] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 803.071055] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] vm_ref = self.build_virtual_machine(instance, [ 803.071055] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 803.071055] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] vif_infos = vmwarevif.get_vif_info(self._session, [ 803.071055] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 803.071338] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] for vif in network_info: [ 803.071338] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 803.071338] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] return self._sync_wrapper(fn, *args, **kwargs) [ 803.071338] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 803.071338] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] self.wait() [ 803.071338] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 803.071338] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] self[:] = self._gt.wait() [ 803.071338] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 803.071338] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] return self._exit_event.wait() [ 803.071338] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 803.071338] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] result = hub.switch() [ 803.071338] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
803.071338] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] return self.greenlet.switch() [ 803.071734] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 803.071734] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] result = function(*args, **kwargs) [ 803.071734] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 803.071734] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] return func(*args, **kwargs) [ 803.071734] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 803.071734] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] raise e [ 803.071734] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 803.071734] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] nwinfo = self.network_api.allocate_for_instance( [ 803.071734] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 803.071734] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] created_port_ids = self._update_ports_for_instance( [ 803.071734] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 803.071734] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] with excutils.save_and_reraise_exception(): [ 803.071734] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 803.072066] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] self.force_reraise() [ 803.072066] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 803.072066] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] raise self.value [ 803.072066] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 803.072066] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] updated_port = self._update_port( [ 803.072066] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 803.072066] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] _ensure_no_port_binding_failure(port) [ 803.072066] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 803.072066] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] raise exception.PortBindingFailed(port_id=port['id']) [ 803.072066] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] nova.exception.PortBindingFailed: Binding failed for port f4e2ec88-4a20-4948-93f1-f9ff4ba04d70, please check neutron logs for more information. [ 803.072066] env[62096]: ERROR nova.compute.manager [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] [ 803.072432] env[62096]: DEBUG nova.compute.utils [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] Binding failed for port f4e2ec88-4a20-4948-93f1-f9ff4ba04d70, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 803.073085] env[62096]: DEBUG oslo_concurrency.lockutils [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 25.935s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 803.074901] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Expecting reply to msg 8905a45f27fb49b7903b70cd8f211823 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 803.076079] env[62096]: DEBUG nova.compute.manager [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] Build of instance de88f8a1-20a5-49f9-adcb-de48aeaa548a was re-scheduled: Binding failed for port f4e2ec88-4a20-4948-93f1-f9ff4ba04d70, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 803.076512] env[62096]: DEBUG nova.compute.manager [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 803.076734] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Acquiring lock "refresh_cache-de88f8a1-20a5-49f9-adcb-de48aeaa548a" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 803.076878] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Acquired lock "refresh_cache-de88f8a1-20a5-49f9-adcb-de48aeaa548a" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.077032] env[62096]: DEBUG nova.network.neutron [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 803.077396] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 6e2a2072e39b46fa8e0548f201c92dbd in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 803.086403] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6e2a2072e39b46fa8e0548f201c92dbd [ 803.116846] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8905a45f27fb49b7903b70cd8f211823 [ 803.214599] env[62096]: DEBUG nova.compute.manager [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 803.239041] env[62096]: DEBUG nova.virt.hardware [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 803.239292] env[62096]: DEBUG nova.virt.hardware [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 803.239444] env[62096]: DEBUG nova.virt.hardware [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 803.239617] env[62096]: DEBUG nova.virt.hardware [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 803.239756] env[62096]: DEBUG nova.virt.hardware [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 803.239894] env[62096]: DEBUG nova.virt.hardware [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 803.240120] env[62096]: DEBUG nova.virt.hardware [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 803.240277] env[62096]: DEBUG nova.virt.hardware [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 803.240475] env[62096]: DEBUG 
nova.virt.hardware [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 803.240579] env[62096]: DEBUG nova.virt.hardware [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 803.240742] env[62096]: DEBUG nova.virt.hardware [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 803.241870] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4ae5482-fd6b-4bbb-9cd4-38351c4905f4 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.252300] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c34afc42-9848-46cf-a87d-d63ab94f096c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.298365] env[62096]: DEBUG nova.network.neutron [req-b6b858ad-34b8-43db-ac90-40e50a8be534 req-9130d74c-2e4b-4445-b806-028ba16f88f9 service nova] [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 803.321977] env[62096]: DEBUG nova.network.neutron [-] [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.322477] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 6ccfbac6b14f40a4b68923b14893fec4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 803.333280] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6ccfbac6b14f40a4b68923b14893fec4 [ 803.415604] env[62096]: DEBUG nova.network.neutron [req-b6b858ad-34b8-43db-ac90-40e50a8be534 req-9130d74c-2e4b-4445-b806-028ba16f88f9 service nova] [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.416322] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-b6b858ad-34b8-43db-ac90-40e50a8be534 req-9130d74c-2e4b-4445-b806-028ba16f88f9 service nova] Expecting reply to msg 8bbdec0aaf324197a002b2608a1e2b55 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 803.424411] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8bbdec0aaf324197a002b2608a1e2b55 [ 803.550186] env[62096]: ERROR nova.compute.manager [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 0be010ec-25ec-41be-b4fc-96b43c921160, please check neutron logs for more 
information. [ 803.550186] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 803.550186] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 803.550186] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 803.550186] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 803.550186] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 803.550186] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 803.550186] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 803.550186] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 803.550186] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 803.550186] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 803.550186] env[62096]: ERROR nova.compute.manager raise self.value [ 803.550186] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 803.550186] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 803.550186] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 803.550186] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 803.550759] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 803.550759] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 803.550759] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 0be010ec-25ec-41be-b4fc-96b43c921160, please check neutron logs for more information. 
[ 803.550759] env[62096]: ERROR nova.compute.manager [ 803.550759] env[62096]: Traceback (most recent call last): [ 803.550759] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 803.550759] env[62096]: listener.cb(fileno) [ 803.550759] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 803.550759] env[62096]: result = function(*args, **kwargs) [ 803.550759] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 803.550759] env[62096]: return func(*args, **kwargs) [ 803.550759] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 803.550759] env[62096]: raise e [ 803.550759] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 803.550759] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 803.550759] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 803.550759] env[62096]: created_port_ids = self._update_ports_for_instance( [ 803.550759] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 803.550759] env[62096]: with excutils.save_and_reraise_exception(): [ 803.550759] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 803.550759] env[62096]: self.force_reraise() [ 803.550759] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 803.550759] env[62096]: raise self.value [ 803.550759] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 803.550759] env[62096]: updated_port = self._update_port( [ 803.550759] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 803.550759] env[62096]: _ensure_no_port_binding_failure(port) [ 803.550759] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 803.550759] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 803.551713] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 0be010ec-25ec-41be-b4fc-96b43c921160, please check neutron logs for more information. [ 803.551713] env[62096]: Removing descriptor: 16 [ 803.551713] env[62096]: ERROR nova.compute.manager [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 0be010ec-25ec-41be-b4fc-96b43c921160, please check neutron logs for more information. 
[ 803.551713] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] Traceback (most recent call last): [ 803.551713] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 803.551713] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] yield resources [ 803.551713] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 803.551713] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] self.driver.spawn(context, instance, image_meta, [ 803.551713] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 803.551713] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 803.551713] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 803.551713] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] vm_ref = self.build_virtual_machine(instance, [ 803.552213] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 803.552213] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] vif_infos = vmwarevif.get_vif_info(self._session, [ 803.552213] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 803.552213] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] for vif in network_info: [ 803.552213] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 803.552213] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] return self._sync_wrapper(fn, *args, **kwargs) [ 803.552213] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 803.552213] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] self.wait() [ 803.552213] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 803.552213] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] self[:] = self._gt.wait() [ 803.552213] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 803.552213] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] return self._exit_event.wait() [ 803.552213] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 803.552654] env[62096]: ERROR 
nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] result = hub.switch() [ 803.552654] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 803.552654] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] return self.greenlet.switch() [ 803.552654] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 803.552654] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] result = function(*args, **kwargs) [ 803.552654] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 803.552654] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] return func(*args, **kwargs) [ 803.552654] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 803.552654] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] raise e [ 803.552654] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 803.552654] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] nwinfo = self.network_api.allocate_for_instance( [ 803.552654] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 803.552654] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] created_port_ids = self._update_ports_for_instance( [ 803.553092] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 803.553092] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] with excutils.save_and_reraise_exception(): [ 803.553092] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 803.553092] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] self.force_reraise() [ 803.553092] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 803.553092] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] raise self.value [ 803.553092] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 803.553092] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] updated_port = self._update_port( [ 803.553092] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 803.553092] 
env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] _ensure_no_port_binding_failure(port) [ 803.553092] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 803.553092] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] raise exception.PortBindingFailed(port_id=port['id']) [ 803.553510] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] nova.exception.PortBindingFailed: Binding failed for port 0be010ec-25ec-41be-b4fc-96b43c921160, please check neutron logs for more information. [ 803.553510] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] [ 803.553510] env[62096]: INFO nova.compute.manager [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] Terminating instance [ 803.554196] env[62096]: DEBUG oslo_concurrency.lockutils [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Acquiring lock "refresh_cache-c928bf83-9517-449a-854c-6f3d8ce4faa0" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 803.554480] env[62096]: DEBUG oslo_concurrency.lockutils [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Acquired lock "refresh_cache-c928bf83-9517-449a-854c-6f3d8ce4faa0" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.554763] env[62096]: DEBUG nova.network.neutron [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 803.555414] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Expecting reply to msg ec81898f66a943b4a5fa1423122697fa in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 803.563205] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ec81898f66a943b4a5fa1423122697fa [ 803.596960] env[62096]: DEBUG nova.network.neutron [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 803.647245] env[62096]: DEBUG nova.network.neutron [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 803.735763] env[62096]: DEBUG nova.network.neutron [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.736317] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg e30bee1f4a4947109dc5ae530b15b453 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 803.744071] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e30bee1f4a4947109dc5ae530b15b453 [ 803.787311] env[62096]: DEBUG nova.network.neutron [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.787903] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Expecting reply to msg 6fb7a008433040a88ec079682f43f8d9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 803.797486] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6fb7a008433040a88ec079682f43f8d9 [ 803.824921] env[62096]: INFO nova.compute.manager [-] [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] Took 1.02 seconds to deallocate network for instance. 
[ 803.827094] env[62096]: DEBUG nova.compute.claims [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 803.827278] env[62096]: DEBUG oslo_concurrency.lockutils [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 803.899298] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4a36ff5-8097-4f36-b6d4-2eaa8efc5e1e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.907310] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dea4a634-3d95-4ebe-a0d7-6916748468c9 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.938303] env[62096]: DEBUG oslo_concurrency.lockutils [req-b6b858ad-34b8-43db-ac90-40e50a8be534 req-9130d74c-2e4b-4445-b806-028ba16f88f9 service nova] Releasing lock "refresh_cache-3283ae52-1a1f-4dec-91f3-44cc42361bb5" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 803.938551] env[62096]: DEBUG nova.compute.manager [req-b6b858ad-34b8-43db-ac90-40e50a8be534 req-9130d74c-2e4b-4445-b806-028ba16f88f9 service nova] [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] Received event network-vif-deleted-956e7432-5428-47af-a76b-6187f602f8ba {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 803.939869] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2727523f-f5ad-4350-af72-8f243c22a39d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.946801] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d771dd16-00fc-4751-91a8-e5416c528207 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.959687] env[62096]: DEBUG nova.compute.provider_tree [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 803.971640] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Expecting reply to msg ed9d7b6027024a53a208e9f897a76093 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 803.971640] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ed9d7b6027024a53a208e9f897a76093 [ 804.238932] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] 
Releasing lock "refresh_cache-de88f8a1-20a5-49f9-adcb-de48aeaa548a" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 804.239183] env[62096]: DEBUG nova.compute.manager [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 804.239370] env[62096]: DEBUG nova.compute.manager [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 804.239536] env[62096]: DEBUG nova.network.neutron [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 804.255648] env[62096]: DEBUG nova.network.neutron [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 804.256342] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 2ab7b0459df645e685a945b0b502af4f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 804.264233] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2ab7b0459df645e685a945b0b502af4f [ 804.291698] env[62096]: DEBUG oslo_concurrency.lockutils [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Releasing lock "refresh_cache-c928bf83-9517-449a-854c-6f3d8ce4faa0" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 804.292154] env[62096]: DEBUG nova.compute.manager [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 804.292357] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 804.292643] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-78208784-861c-40f6-8073-a07b4388500d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.301815] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-258faead-b82c-4cd8-a1cc-51af8a7059b9 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.324642] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c928bf83-9517-449a-854c-6f3d8ce4faa0 could not be found. [ 804.324872] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 804.325062] env[62096]: INFO nova.compute.manager [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] Took 0.03 seconds to destroy the instance on the hypervisor. [ 804.325316] env[62096]: DEBUG oslo.service.loopingcall [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 804.325566] env[62096]: DEBUG nova.compute.manager [-] [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 804.325655] env[62096]: DEBUG nova.network.neutron [-] [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 804.345959] env[62096]: DEBUG nova.network.neutron [-] [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 804.346577] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg cc472d28a0c741d5b20b5cc499f49c47 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 804.354046] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cc472d28a0c741d5b20b5cc499f49c47 [ 804.467367] env[62096]: DEBUG nova.scheduler.client.report [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 804.469800] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Expecting reply to msg 9de637e9bb824b5b984943186bd49d64 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 804.482737] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9de637e9bb824b5b984943186bd49d64 [ 804.687898] env[62096]: DEBUG nova.compute.manager [req-3c06fbd1-3124-41de-90c4-bcf7e1823cdc req-51391f2f-d278-415c-9caf-626fb64e057b service nova] [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] Received event network-changed-0be010ec-25ec-41be-b4fc-96b43c921160 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 804.688154] env[62096]: DEBUG nova.compute.manager [req-3c06fbd1-3124-41de-90c4-bcf7e1823cdc req-51391f2f-d278-415c-9caf-626fb64e057b service nova] [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] Refreshing instance network info cache due to event network-changed-0be010ec-25ec-41be-b4fc-96b43c921160. 
{{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 804.688357] env[62096]: DEBUG oslo_concurrency.lockutils [req-3c06fbd1-3124-41de-90c4-bcf7e1823cdc req-51391f2f-d278-415c-9caf-626fb64e057b service nova] Acquiring lock "refresh_cache-c928bf83-9517-449a-854c-6f3d8ce4faa0" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 804.688498] env[62096]: DEBUG oslo_concurrency.lockutils [req-3c06fbd1-3124-41de-90c4-bcf7e1823cdc req-51391f2f-d278-415c-9caf-626fb64e057b service nova] Acquired lock "refresh_cache-c928bf83-9517-449a-854c-6f3d8ce4faa0" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.688653] env[62096]: DEBUG nova.network.neutron [req-3c06fbd1-3124-41de-90c4-bcf7e1823cdc req-51391f2f-d278-415c-9caf-626fb64e057b service nova] [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] Refreshing network info cache for port 0be010ec-25ec-41be-b4fc-96b43c921160 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 804.689076] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-3c06fbd1-3124-41de-90c4-bcf7e1823cdc req-51391f2f-d278-415c-9caf-626fb64e057b service nova] Expecting reply to msg 8cde239bf78d4096b092cc2c9af18d9e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 804.696021] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8cde239bf78d4096b092cc2c9af18d9e [ 804.758291] env[62096]: DEBUG nova.network.neutron [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.758675] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg da3af68ed1e2433db4b023f66bc253b5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 804.767594] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg da3af68ed1e2433db4b023f66bc253b5 [ 804.849839] env[62096]: DEBUG nova.network.neutron [-] [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.849839] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg f4174da386504132b54b9cb573caee5f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 804.858710] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f4174da386504132b54b9cb573caee5f [ 804.977135] env[62096]: DEBUG oslo_concurrency.lockutils [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.900s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 804.977135] env[62096]: ERROR nova.compute.manager [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] [instance: 
d7bd302c-1d85-45c2-9a3e-9855a6488d92] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ddbe541a-41cd-49f6-8467-7f80404aec07, please check neutron logs for more information. [ 804.977135] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] Traceback (most recent call last): [ 804.977135] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 804.977135] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] self.driver.spawn(context, instance, image_meta, [ 804.977135] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 804.977135] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] self._vmops.spawn(context, instance, image_meta, injected_files, [ 804.977135] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 804.977135] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] vm_ref = self.build_virtual_machine(instance, [ 804.977820] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 804.977820] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] vif_infos = vmwarevif.get_vif_info(self._session, [ 804.977820] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 804.977820] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] for vif in network_info: [ 804.977820] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 804.977820] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] return self._sync_wrapper(fn, *args, **kwargs) [ 804.977820] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 804.977820] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] self.wait() [ 804.977820] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 804.977820] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] self[:] = self._gt.wait() [ 804.977820] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 804.977820] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] return self._exit_event.wait() [ 804.977820] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 804.978193] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] result = 
hub.switch() [ 804.978193] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 804.978193] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] return self.greenlet.switch() [ 804.978193] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 804.978193] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] result = function(*args, **kwargs) [ 804.978193] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 804.978193] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] return func(*args, **kwargs) [ 804.978193] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 804.978193] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] raise e [ 804.978193] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 804.978193] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] nwinfo = self.network_api.allocate_for_instance( [ 804.978193] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 804.978193] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] created_port_ids = self._update_ports_for_instance( [ 804.978557] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 804.978557] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] with excutils.save_and_reraise_exception(): [ 804.978557] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 804.978557] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] self.force_reraise() [ 804.978557] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 804.978557] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] raise self.value [ 804.978557] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 804.978557] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] updated_port = self._update_port( [ 804.978557] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 804.978557] env[62096]: ERROR nova.compute.manager [instance: 
d7bd302c-1d85-45c2-9a3e-9855a6488d92] _ensure_no_port_binding_failure(port) [ 804.978557] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 804.978557] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] raise exception.PortBindingFailed(port_id=port['id']) [ 804.978891] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] nova.exception.PortBindingFailed: Binding failed for port ddbe541a-41cd-49f6-8467-7f80404aec07, please check neutron logs for more information. [ 804.978891] env[62096]: ERROR nova.compute.manager [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] [ 804.978891] env[62096]: DEBUG nova.compute.utils [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] Binding failed for port ddbe541a-41cd-49f6-8467-7f80404aec07, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 804.978891] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.858s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 804.980989] env[62096]: INFO nova.compute.claims [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 804.982797] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 0ec4dffa1a87484cb23e0fe8f7aa38ec in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 804.988764] env[62096]: DEBUG nova.compute.manager [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] Build of instance d7bd302c-1d85-45c2-9a3e-9855a6488d92 was re-scheduled: Binding failed for port ddbe541a-41cd-49f6-8467-7f80404aec07, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 804.988764] env[62096]: DEBUG nova.compute.manager [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 804.988764] env[62096]: DEBUG oslo_concurrency.lockutils [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Acquiring lock "refresh_cache-d7bd302c-1d85-45c2-9a3e-9855a6488d92" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 804.988764] env[62096]: DEBUG oslo_concurrency.lockutils [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Acquired lock "refresh_cache-d7bd302c-1d85-45c2-9a3e-9855a6488d92" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.989038] env[62096]: DEBUG nova.network.neutron [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 804.989038] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Expecting reply to msg 135d8005c82042109d8e8ab97b12d136 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 804.995037] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 135d8005c82042109d8e8ab97b12d136 [ 805.028574] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0ec4dffa1a87484cb23e0fe8f7aa38ec [ 805.210785] env[62096]: DEBUG nova.network.neutron [req-3c06fbd1-3124-41de-90c4-bcf7e1823cdc req-51391f2f-d278-415c-9caf-626fb64e057b service nova] [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 805.262275] env[62096]: INFO nova.compute.manager [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: de88f8a1-20a5-49f9-adcb-de48aeaa548a] Took 1.02 seconds to deallocate network for instance. [ 805.264122] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg e03aecdace2347b29b652990578cfaaa in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 805.304345] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e03aecdace2347b29b652990578cfaaa [ 805.353704] env[62096]: INFO nova.compute.manager [-] [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] Took 1.03 seconds to deallocate network for instance. 
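The lock traces in these entries all come from oslo.concurrency: the 'Acquiring lock "…" by "…"' / "acquired … :: waited" / '"released" … :: held' triples point at the inner() wrapper in lockutils.py (the synchronized decorator form used for "compute_resources" and the per-instance build locks), while the plain Acquiring/Acquired/Releasing lines for "refresh_cache-<uuid>" point at the lock() context manager. A minimal sketch of both forms of the public lockutils API follows; the lock names and functions are placeholders, not Nova's actual call sites.

```python
# Minimal sketch of the two oslo.concurrency locking forms seen in the log.
# Lock names and functions are placeholders, not Nova's real call sites.
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def claim_resources():
    # Critical section guarded by a named in-process lock; the decorator's
    # inner() wrapper emits the DEBUG acquire/wait/hold messages.
    pass


def refresh_cache(instance_uuid):
    # Context-manager form, as used for the per-instance cache locks.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass


claim_resources()
refresh_cache('00000000-0000-0000-0000-000000000000')  # placeholder uuid
```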
[ 805.355456] env[62096]: DEBUG nova.compute.claims [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 805.355456] env[62096]: DEBUG oslo_concurrency.lockutils [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 805.384854] env[62096]: DEBUG nova.network.neutron [req-3c06fbd1-3124-41de-90c4-bcf7e1823cdc req-51391f2f-d278-415c-9caf-626fb64e057b service nova] [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 805.385394] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-3c06fbd1-3124-41de-90c4-bcf7e1823cdc req-51391f2f-d278-415c-9caf-626fb64e057b service nova] Expecting reply to msg 0159258924394094b2d2761b8fde31d9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 805.394700] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0159258924394094b2d2761b8fde31d9 [ 805.493731] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 53c1c8d99ab9457d8a40f407a5793934 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 805.504084] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 53c1c8d99ab9457d8a40f407a5793934 [ 805.513141] env[62096]: DEBUG nova.network.neutron [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 805.664356] env[62096]: DEBUG nova.network.neutron [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 805.664872] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Expecting reply to msg 14d88f1f2d584ec8aa9010fbb7f52e12 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 805.704287] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 14d88f1f2d584ec8aa9010fbb7f52e12 [ 805.768157] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 968a5de1dcfa41a3a650d071f5e19b3e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 805.811479] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 968a5de1dcfa41a3a650d071f5e19b3e [ 805.888120] env[62096]: DEBUG oslo_concurrency.lockutils [req-3c06fbd1-3124-41de-90c4-bcf7e1823cdc req-51391f2f-d278-415c-9caf-626fb64e057b service nova] Releasing lock "refresh_cache-c928bf83-9517-449a-854c-6f3d8ce4faa0" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 805.888412] env[62096]: DEBUG nova.compute.manager [req-3c06fbd1-3124-41de-90c4-bcf7e1823cdc req-51391f2f-d278-415c-9caf-626fb64e057b service nova] [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] Received event network-vif-deleted-0be010ec-25ec-41be-b4fc-96b43c921160 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 806.167035] env[62096]: DEBUG oslo_concurrency.lockutils [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Releasing lock "refresh_cache-d7bd302c-1d85-45c2-9a3e-9855a6488d92" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 806.167608] env[62096]: DEBUG nova.compute.manager [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 806.167777] env[62096]: DEBUG nova.compute.manager [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 806.167940] env[62096]: DEBUG nova.network.neutron [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 806.186048] env[62096]: DEBUG nova.network.neutron [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 806.186787] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Expecting reply to msg 138b3661fdae417d90b5d1e248f28acb in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 806.206795] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 138b3661fdae417d90b5d1e248f28acb [ 806.291523] env[62096]: INFO nova.scheduler.client.report [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Deleted allocations for instance de88f8a1-20a5-49f9-adcb-de48aeaa548a [ 806.299817] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 02856b61ef054773ab0cef49ae3cab48 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 806.318066] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 02856b61ef054773ab0cef49ae3cab48 [ 806.348654] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33004fc5-577f-4448-9fe8-2742327eb888 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.356614] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7433712e-3f05-40f6-aec0-249fbdf5e506 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.397192] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a7e97dc-246f-416c-963b-8dc6b46ea0a3 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.406242] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcb68b44-621a-42b6-96c4-813bb719ce24 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.424303] env[62096]: DEBUG nova.compute.provider_tree [None 
req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 806.424303] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg e7c19664c5ed4c4686786732cf453d69 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 806.428995] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e7c19664c5ed4c4686786732cf453d69 [ 806.689065] env[62096]: DEBUG nova.network.neutron [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.689574] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Expecting reply to msg a1839175bac04827bf9721e7dad37f4f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 806.698249] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a1839175bac04827bf9721e7dad37f4f [ 806.800627] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f0b7d82a-4b7b-42e3-8d2d-3e9d2e37aade tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Lock "de88f8a1-20a5-49f9-adcb-de48aeaa548a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 167.679s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 806.801196] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Expecting reply to msg b90a993c8ba5464b9c3e444a3be8f29c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 806.811306] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b90a993c8ba5464b9c3e444a3be8f29c [ 806.886102] env[62096]: DEBUG oslo_concurrency.lockutils [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Acquiring lock "9b85f3a6-892f-4eb4-b0d8-aacaf6576c78" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 806.886351] env[62096]: DEBUG oslo_concurrency.lockutils [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Lock "9b85f3a6-892f-4eb4-b0d8-aacaf6576c78" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 806.924349] env[62096]: DEBUG 
nova.scheduler.client.report [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 806.926771] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 5ca7fdeefeae4e6095e1a1f6a331e076 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 806.938566] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ca7fdeefeae4e6095e1a1f6a331e076 [ 807.192318] env[62096]: INFO nova.compute.manager [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] [instance: d7bd302c-1d85-45c2-9a3e-9855a6488d92] Took 1.02 seconds to deallocate network for instance. [ 807.194043] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Expecting reply to msg 8e81fefaeff74e05bfbb44623e415e6c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 807.236480] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8e81fefaeff74e05bfbb44623e415e6c [ 807.307176] env[62096]: DEBUG nova.compute.manager [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 807.309313] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Expecting reply to msg b1ac0b3b9cea4d238207646be1e2a64b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 807.351737] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b1ac0b3b9cea4d238207646be1e2a64b [ 807.429747] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.453s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 807.431148] env[62096]: DEBUG nova.compute.manager [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 807.431875] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 03d879f76a4e489b83e031d7106ad322 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 807.432879] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.414s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 807.434639] env[62096]: INFO nova.compute.claims [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 807.436148] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 5541c6d7868642cd9bb55642267fd1ef in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 807.469173] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 03d879f76a4e489b83e031d7106ad322 [ 807.469857] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5541c6d7868642cd9bb55642267fd1ef [ 807.698531] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Expecting reply to msg b42a431e3cc34e6aa885a10231e7db6f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 807.737345] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b42a431e3cc34e6aa885a10231e7db6f [ 807.826902] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.944500] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 3a0cd3aa8c6440d893f22f7e225ce5bd in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 807.945953] env[62096]: DEBUG nova.compute.utils [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 807.946526] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 93c3ba04148b49caa82b72163ead96b3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 807.947447] env[62096]: DEBUG 
nova.compute.manager [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 807.947606] env[62096]: DEBUG nova.network.neutron [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 807.952946] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3a0cd3aa8c6440d893f22f7e225ce5bd [ 807.956603] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 93c3ba04148b49caa82b72163ead96b3 [ 807.999234] env[62096]: DEBUG nova.policy [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2babb5a1feb74792bfdf84538811475d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4ab5813e2204414394f0ecb3160f4687', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 808.134402] env[62096]: DEBUG oslo_concurrency.lockutils [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Acquiring lock "9aae3fde-f55a-4e90-a9b5-4594051183f4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 808.134746] env[62096]: DEBUG oslo_concurrency.lockutils [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Lock "9aae3fde-f55a-4e90-a9b5-4594051183f4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 808.222421] env[62096]: INFO nova.scheduler.client.report [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Deleted allocations for instance d7bd302c-1d85-45c2-9a3e-9855a6488d92 [ 808.228078] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Expecting reply to msg 285f2fb160224689b147a7e2c6daeab9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 808.239169] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 285f2fb160224689b147a7e2c6daeab9 [ 808.320733] env[62096]: DEBUG nova.network.neutron [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 
tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] Successfully created port: 6419be91-62e8-48ce-8284-cedcf93c1574 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 808.451975] env[62096]: DEBUG nova.compute.manager [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] Start building block device mappings for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 808.454461] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg ef185444b955450da107516ae6b475ca in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 808.485666] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ef185444b955450da107516ae6b475ca [ 808.729530] env[62096]: DEBUG oslo_concurrency.lockutils [None req-36b7f3a2-925c-4aea-b829-50ed82eff8b4 tempest-ImagesNegativeTestJSON-1271305456 tempest-ImagesNegativeTestJSON-1271305456-project-member] Lock "d7bd302c-1d85-45c2-9a3e-9855a6488d92" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 169.141s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 808.730103] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Expecting reply to msg e35c6f14cfd74129981e9a2d26be106a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 808.740975] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e35c6f14cfd74129981e9a2d26be106a [ 808.745751] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97a6b957-ee19-4a1b-9182-7247f4fb15fb {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.753235] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-471f1458-7d7d-43a1-8592-f5b9820ff52f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.783965] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-464ca508-3f5f-4c3e-89c3-c48d3677b4ea {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.791276] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58750b76-227a-4b78-b0ca-a6172cc71309 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.804665] env[62096]: DEBUG nova.compute.provider_tree [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 808.805269] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None 
req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 5c978fe63ff94d58b34f493708d87280 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 808.812682] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5c978fe63ff94d58b34f493708d87280 [ 808.959107] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 72436a8be9a741a1907878184703625d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 808.994635] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 72436a8be9a741a1907878184703625d [ 809.231938] env[62096]: DEBUG nova.compute.manager [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 809.233689] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Expecting reply to msg 3fac76987ecc441db6f6c72055a3d2df in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 809.268749] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3fac76987ecc441db6f6c72055a3d2df [ 809.308322] env[62096]: DEBUG nova.scheduler.client.report [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 809.310667] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg f5e8f88541d24c8bbbde50ac1a8db357 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 809.325725] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f5e8f88541d24c8bbbde50ac1a8db357 [ 809.425344] env[62096]: DEBUG nova.compute.manager [req-e8fb0c0d-036f-40f6-9839-39cd149e22ea req-2bcdc305-4be4-4179-a75f-2d0f0e7b4390 service nova] [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] Received event network-changed-6419be91-62e8-48ce-8284-cedcf93c1574 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 809.425546] env[62096]: DEBUG nova.compute.manager [req-e8fb0c0d-036f-40f6-9839-39cd149e22ea req-2bcdc305-4be4-4179-a75f-2d0f0e7b4390 service nova] [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] Refreshing instance network info cache due to event network-changed-6419be91-62e8-48ce-8284-cedcf93c1574. 
{{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 809.425752] env[62096]: DEBUG oslo_concurrency.lockutils [req-e8fb0c0d-036f-40f6-9839-39cd149e22ea req-2bcdc305-4be4-4179-a75f-2d0f0e7b4390 service nova] Acquiring lock "refresh_cache-a5b5afc7-abc0-430a-b682-2c1946b4a6d8" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 809.425889] env[62096]: DEBUG oslo_concurrency.lockutils [req-e8fb0c0d-036f-40f6-9839-39cd149e22ea req-2bcdc305-4be4-4179-a75f-2d0f0e7b4390 service nova] Acquired lock "refresh_cache-a5b5afc7-abc0-430a-b682-2c1946b4a6d8" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.426045] env[62096]: DEBUG nova.network.neutron [req-e8fb0c0d-036f-40f6-9839-39cd149e22ea req-2bcdc305-4be4-4179-a75f-2d0f0e7b4390 service nova] [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] Refreshing network info cache for port 6419be91-62e8-48ce-8284-cedcf93c1574 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 809.426485] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-e8fb0c0d-036f-40f6-9839-39cd149e22ea req-2bcdc305-4be4-4179-a75f-2d0f0e7b4390 service nova] Expecting reply to msg d7823e0151294370a7e6d644292f32aa in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 809.435018] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d7823e0151294370a7e6d644292f32aa [ 809.462434] env[62096]: DEBUG nova.compute.manager [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 809.486937] env[62096]: DEBUG nova.virt.hardware [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 809.487313] env[62096]: DEBUG nova.virt.hardware [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 809.487398] env[62096]: DEBUG nova.virt.hardware [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 809.487606] env[62096]: DEBUG nova.virt.hardware [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 809.487820] env[62096]: DEBUG nova.virt.hardware [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 809.487944] env[62096]: DEBUG nova.virt.hardware [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 809.488196] env[62096]: DEBUG nova.virt.hardware [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 809.488399] env[62096]: DEBUG nova.virt.hardware [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 809.488596] env[62096]: DEBUG nova.virt.hardware [None 
req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 809.488780] env[62096]: DEBUG nova.virt.hardware [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 809.489000] env[62096]: DEBUG nova.virt.hardware [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 809.489869] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a417574-8a4a-4f44-ab7e-c11e9cf93230 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.498874] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e194b5e-5002-4203-9c78-ec9861826701 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.672503] env[62096]: ERROR nova.compute.manager [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 6419be91-62e8-48ce-8284-cedcf93c1574, please check neutron logs for more information. 
[ 809.672503] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 809.672503] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 809.672503] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 809.672503] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 809.672503] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 809.672503] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 809.672503] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 809.672503] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 809.672503] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 809.672503] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 809.672503] env[62096]: ERROR nova.compute.manager raise self.value [ 809.672503] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 809.672503] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 809.672503] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 809.672503] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 809.672950] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 809.672950] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 809.672950] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 6419be91-62e8-48ce-8284-cedcf93c1574, please check neutron logs for more information. 
[ 809.672950] env[62096]: ERROR nova.compute.manager [ 809.672950] env[62096]: Traceback (most recent call last): [ 809.672950] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 809.672950] env[62096]: listener.cb(fileno) [ 809.672950] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 809.672950] env[62096]: result = function(*args, **kwargs) [ 809.672950] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 809.672950] env[62096]: return func(*args, **kwargs) [ 809.672950] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 809.672950] env[62096]: raise e [ 809.672950] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 809.672950] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 809.672950] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 809.672950] env[62096]: created_port_ids = self._update_ports_for_instance( [ 809.672950] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 809.672950] env[62096]: with excutils.save_and_reraise_exception(): [ 809.672950] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 809.672950] env[62096]: self.force_reraise() [ 809.672950] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 809.672950] env[62096]: raise self.value [ 809.672950] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 809.672950] env[62096]: updated_port = self._update_port( [ 809.672950] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 809.672950] env[62096]: _ensure_no_port_binding_failure(port) [ 809.672950] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 809.672950] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 809.673822] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 6419be91-62e8-48ce-8284-cedcf93c1574, please check neutron logs for more information. [ 809.673822] env[62096]: Removing descriptor: 16 [ 809.673822] env[62096]: ERROR nova.compute.manager [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 6419be91-62e8-48ce-8284-cedcf93c1574, please check neutron logs for more information. 
[ 809.673822] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] Traceback (most recent call last): [ 809.673822] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 809.673822] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] yield resources [ 809.673822] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 809.673822] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] self.driver.spawn(context, instance, image_meta, [ 809.673822] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 809.673822] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 809.673822] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 809.673822] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] vm_ref = self.build_virtual_machine(instance, [ 809.674192] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 809.674192] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] vif_infos = vmwarevif.get_vif_info(self._session, [ 809.674192] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 809.674192] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] for vif in network_info: [ 809.674192] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 809.674192] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] return self._sync_wrapper(fn, *args, **kwargs) [ 809.674192] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 809.674192] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] self.wait() [ 809.674192] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 809.674192] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] self[:] = self._gt.wait() [ 809.674192] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 809.674192] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] return self._exit_event.wait() [ 809.674192] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 809.675363] env[62096]: ERROR 
nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] result = hub.switch() [ 809.675363] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 809.675363] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] return self.greenlet.switch() [ 809.675363] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 809.675363] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] result = function(*args, **kwargs) [ 809.675363] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 809.675363] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] return func(*args, **kwargs) [ 809.675363] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 809.675363] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] raise e [ 809.675363] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 809.675363] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] nwinfo = self.network_api.allocate_for_instance( [ 809.675363] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 809.675363] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] created_port_ids = self._update_ports_for_instance( [ 809.675734] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 809.675734] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] with excutils.save_and_reraise_exception(): [ 809.675734] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 809.675734] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] self.force_reraise() [ 809.675734] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 809.675734] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] raise self.value [ 809.675734] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 809.675734] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] updated_port = self._update_port( [ 809.675734] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 809.675734] 
env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] _ensure_no_port_binding_failure(port) [ 809.675734] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 809.675734] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] raise exception.PortBindingFailed(port_id=port['id']) [ 809.676089] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] nova.exception.PortBindingFailed: Binding failed for port 6419be91-62e8-48ce-8284-cedcf93c1574, please check neutron logs for more information. [ 809.676089] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] [ 809.676089] env[62096]: INFO nova.compute.manager [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] Terminating instance [ 809.676721] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Acquiring lock "refresh_cache-a5b5afc7-abc0-430a-b682-2c1946b4a6d8" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 809.754360] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 809.813436] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.380s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 809.814003] env[62096]: DEBUG nova.compute.manager [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 809.815730] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 4a70239433664c098980086fa869be42 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 809.816769] env[62096]: DEBUG oslo_concurrency.lockutils [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 25.597s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 809.818398] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 0b617ee2d20943d4b84af4986647f99c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 809.853574] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0b617ee2d20943d4b84af4986647f99c [ 809.854623] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4a70239433664c098980086fa869be42 [ 809.944574] env[62096]: DEBUG nova.network.neutron [req-e8fb0c0d-036f-40f6-9839-39cd149e22ea req-2bcdc305-4be4-4179-a75f-2d0f0e7b4390 service nova] [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 810.102172] env[62096]: DEBUG nova.network.neutron [req-e8fb0c0d-036f-40f6-9839-39cd149e22ea req-2bcdc305-4be4-4179-a75f-2d0f0e7b4390 service nova] [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 810.102711] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-e8fb0c0d-036f-40f6-9839-39cd149e22ea req-2bcdc305-4be4-4179-a75f-2d0f0e7b4390 service nova] Expecting reply to msg 23cf9f47477348bf9d4e52a7e5d1d852 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 810.111238] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 23cf9f47477348bf9d4e52a7e5d1d852 [ 810.320952] env[62096]: DEBUG nova.compute.utils [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 810.321577] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 1c96c196c0e745669b1cab360982d479 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 810.325563] env[62096]: DEBUG nova.compute.manager [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 810.325780] env[62096]: DEBUG nova.network.neutron [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 810.331548] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1c96c196c0e745669b1cab360982d479 [ 810.368290] env[62096]: DEBUG nova.policy [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2babb5a1feb74792bfdf84538811475d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4ab5813e2204414394f0ecb3160f4687', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 810.580079] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4777173b-adb4-4c3e-91d2-95df2b75cd56 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.597790] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d729a7ed-5045-4d70-ab31-6d9fdb881f70 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.628508] env[62096]: DEBUG oslo_concurrency.lockutils [req-e8fb0c0d-036f-40f6-9839-39cd149e22ea req-2bcdc305-4be4-4179-a75f-2d0f0e7b4390 service nova] Releasing lock "refresh_cache-a5b5afc7-abc0-430a-b682-2c1946b4a6d8" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 810.629092] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Acquired lock "refresh_cache-a5b5afc7-abc0-430a-b682-2c1946b4a6d8" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 810.629276] env[62096]: DEBUG nova.network.neutron [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 810.629702] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 5140b672087c43fc943e541d181b51e4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 810.630971] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c308449c-291c-4d22-bab0-023b52cee904 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.638037] env[62096]: 
INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5140b672087c43fc943e541d181b51e4 [ 810.639309] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12da0b27-f6d8-49a8-8cc7-b1e098b2f5d1 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.652863] env[62096]: DEBUG nova.compute.provider_tree [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 810.653340] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 129ea1524dd949b6b8d5cbcb982b371b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 810.661217] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 129ea1524dd949b6b8d5cbcb982b371b [ 810.688071] env[62096]: DEBUG nova.network.neutron [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] Successfully created port: fe3b5425-7821-4806-a61c-d0d8e6c0b228 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 810.826812] env[62096]: DEBUG nova.compute.manager [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 810.828701] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg ec885a2c864b4068b94668672a002f63 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 810.865258] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ec885a2c864b4068b94668672a002f63 [ 811.160305] env[62096]: DEBUG nova.scheduler.client.report [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 811.162670] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg ee7e4d5f18834a358fa21d126308f0fd in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 811.168530] env[62096]: DEBUG nova.network.neutron [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 811.177149] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ee7e4d5f18834a358fa21d126308f0fd [ 811.312241] env[62096]: DEBUG nova.network.neutron [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.312789] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg bf7e924d44da4806b4f37f4c42210f5b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 811.321549] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bf7e924d44da4806b4f37f4c42210f5b [ 811.333781] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 35f0fd7b2d4c47cabed8458b94afa467 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 811.371601] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 35f0fd7b2d4c47cabed8458b94afa467 [ 811.503504] env[62096]: DEBUG nova.compute.manager [req-4b62af9b-708e-4a6f-8e3c-25b613b7a52d req-6d8c53a1-d0b9-40ca-81b0-4d6db19320a6 service nova] [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] Received event network-vif-deleted-6419be91-62e8-48ce-8284-cedcf93c1574 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 811.671865] env[62096]: DEBUG oslo_concurrency.lockutils [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.855s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 811.672367] env[62096]: ERROR nova.compute.manager [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b29ebaaf-3fa3-40c7-86c7-fc7395ca11ff, please check neutron logs for more information. 
[ 811.672367] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] Traceback (most recent call last): [ 811.672367] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 811.672367] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] self.driver.spawn(context, instance, image_meta, [ 811.672367] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 811.672367] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 811.672367] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 811.672367] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] vm_ref = self.build_virtual_machine(instance, [ 811.672367] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 811.672367] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] vif_infos = vmwarevif.get_vif_info(self._session, [ 811.672367] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 811.672887] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] for vif in network_info: [ 811.672887] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 811.672887] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] return self._sync_wrapper(fn, *args, **kwargs) [ 811.672887] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 811.672887] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] self.wait() [ 811.672887] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 811.672887] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] self[:] = self._gt.wait() [ 811.672887] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 811.672887] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] return self._exit_event.wait() [ 811.672887] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 811.672887] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] result = hub.switch() [ 811.672887] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
811.672887] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] return self.greenlet.switch() [ 811.673413] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 811.673413] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] result = function(*args, **kwargs) [ 811.673413] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 811.673413] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] return func(*args, **kwargs) [ 811.673413] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 811.673413] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] raise e [ 811.673413] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 811.673413] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] nwinfo = self.network_api.allocate_for_instance( [ 811.673413] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 811.673413] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] created_port_ids = self._update_ports_for_instance( [ 811.673413] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 811.673413] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] with excutils.save_and_reraise_exception(): [ 811.673413] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 811.673949] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] self.force_reraise() [ 811.673949] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 811.673949] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] raise self.value [ 811.673949] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 811.673949] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] updated_port = self._update_port( [ 811.673949] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 811.673949] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] _ensure_no_port_binding_failure(port) [ 811.673949] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 811.673949] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] raise exception.PortBindingFailed(port_id=port['id']) [ 811.673949] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] nova.exception.PortBindingFailed: Binding failed for port b29ebaaf-3fa3-40c7-86c7-fc7395ca11ff, please check neutron logs for more information. [ 811.673949] env[62096]: ERROR nova.compute.manager [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] [ 811.674526] env[62096]: DEBUG nova.compute.utils [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] Binding failed for port b29ebaaf-3fa3-40c7-86c7-fc7395ca11ff, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 811.674526] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 25.407s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 811.676125] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Expecting reply to msg 4df3f6bcfffb4798a6168680fe5284f6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 811.680034] env[62096]: DEBUG nova.compute.manager [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] Build of instance cffc0ebc-4fb1-47c9-8882-b8431046ef2f was re-scheduled: Binding failed for port b29ebaaf-3fa3-40c7-86c7-fc7395ca11ff, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 811.680034] env[62096]: DEBUG nova.compute.manager [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 811.680034] env[62096]: DEBUG oslo_concurrency.lockutils [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Acquiring lock "refresh_cache-cffc0ebc-4fb1-47c9-8882-b8431046ef2f" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 811.680034] env[62096]: DEBUG oslo_concurrency.lockutils [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Acquired lock "refresh_cache-cffc0ebc-4fb1-47c9-8882-b8431046ef2f" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 811.680034] env[62096]: DEBUG nova.network.neutron [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 811.680312] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg aa8aef3901104370bd4107f2ef85cf44 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 811.686595] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aa8aef3901104370bd4107f2ef85cf44 [ 811.713856] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4df3f6bcfffb4798a6168680fe5284f6 [ 811.815377] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Releasing lock "refresh_cache-a5b5afc7-abc0-430a-b682-2c1946b4a6d8" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 811.815785] env[62096]: DEBUG nova.compute.manager [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 811.816291] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 811.816606] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3f2d015b-3f16-4233-a062-fd0a927d70a8 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.825471] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6df2eb6f-9ee2-4217-ae39-81659f21206a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.842507] env[62096]: DEBUG nova.compute.manager [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] Start spawning the instance on the hypervisor. {{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 811.853763] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a5b5afc7-abc0-430a-b682-2c1946b4a6d8 could not be found. [ 811.854073] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 811.854327] env[62096]: INFO nova.compute.manager [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] Took 0.04 seconds to destroy the instance on the hypervisor. [ 811.854672] env[62096]: DEBUG oslo.service.loopingcall [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 811.854967] env[62096]: DEBUG nova.compute.manager [-] [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 811.855070] env[62096]: DEBUG nova.network.neutron [-] [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 811.858108] env[62096]: ERROR nova.compute.manager [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port fe3b5425-7821-4806-a61c-d0d8e6c0b228, please check neutron logs for more information. [ 811.858108] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 811.858108] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 811.858108] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 811.858108] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 811.858108] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 811.858108] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 811.858108] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 811.858108] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 811.858108] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 811.858108] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 811.858108] env[62096]: ERROR nova.compute.manager raise self.value [ 811.858108] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 811.858108] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 811.858108] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 811.858108] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 811.858569] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 811.858569] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 811.858569] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port fe3b5425-7821-4806-a61c-d0d8e6c0b228, please check neutron logs for more information. 
[ 811.858569] env[62096]: ERROR nova.compute.manager [ 811.858569] env[62096]: Traceback (most recent call last): [ 811.858569] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 811.858569] env[62096]: listener.cb(fileno) [ 811.858569] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 811.858569] env[62096]: result = function(*args, **kwargs) [ 811.858569] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 811.858569] env[62096]: return func(*args, **kwargs) [ 811.858569] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 811.858569] env[62096]: raise e [ 811.858569] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 811.858569] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 811.858569] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 811.858569] env[62096]: created_port_ids = self._update_ports_for_instance( [ 811.858569] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 811.858569] env[62096]: with excutils.save_and_reraise_exception(): [ 811.858569] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 811.858569] env[62096]: self.force_reraise() [ 811.858569] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 811.858569] env[62096]: raise self.value [ 811.858569] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 811.858569] env[62096]: updated_port = self._update_port( [ 811.858569] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 811.858569] env[62096]: _ensure_no_port_binding_failure(port) [ 811.858569] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 811.858569] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 811.859350] env[62096]: nova.exception.PortBindingFailed: Binding failed for port fe3b5425-7821-4806-a61c-d0d8e6c0b228, please check neutron logs for more information. 
[ 811.859350] env[62096]: Removing descriptor: 16 [ 811.866086] env[62096]: DEBUG nova.virt.hardware [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 811.866350] env[62096]: DEBUG nova.virt.hardware [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 811.866517] env[62096]: DEBUG nova.virt.hardware [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 811.866700] env[62096]: DEBUG nova.virt.hardware [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 811.866840] env[62096]: DEBUG nova.virt.hardware [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 811.866983] env[62096]: DEBUG nova.virt.hardware [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 811.867185] env[62096]: DEBUG nova.virt.hardware [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 811.867342] env[62096]: DEBUG nova.virt.hardware [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 811.867506] env[62096]: DEBUG nova.virt.hardware [None 
req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 811.867851] env[62096]: DEBUG nova.virt.hardware [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 811.868075] env[62096]: DEBUG nova.virt.hardware [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 811.868873] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3173c819-2b9f-45b6-9a3a-3524ec24d813 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.876133] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8278cbc2-0e48-4e81-bb25-889805699436 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.880846] env[62096]: DEBUG nova.network.neutron [-] [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 811.881370] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 788e29f62e84471cb3cf277d4e935ac2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 811.899847] env[62096]: ERROR nova.compute.manager [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port fe3b5425-7821-4806-a61c-d0d8e6c0b228, please check neutron logs for more information. 
[ 811.899847] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] Traceback (most recent call last): [ 811.899847] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 811.899847] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] yield resources [ 811.899847] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 811.899847] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] self.driver.spawn(context, instance, image_meta, [ 811.899847] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 811.899847] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 811.899847] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 811.899847] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] vm_ref = self.build_virtual_machine(instance, [ 811.899847] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 811.900255] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] vif_infos = vmwarevif.get_vif_info(self._session, [ 811.900255] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 811.900255] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] for vif in network_info: [ 811.900255] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 811.900255] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] return self._sync_wrapper(fn, *args, **kwargs) [ 811.900255] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 811.900255] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] self.wait() [ 811.900255] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 811.900255] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] self[:] = self._gt.wait() [ 811.900255] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 811.900255] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] return self._exit_event.wait() [ 811.900255] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 811.900255] env[62096]: ERROR 
nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] current.throw(*self._exc) [ 811.900574] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 811.900574] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] result = function(*args, **kwargs) [ 811.900574] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 811.900574] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] return func(*args, **kwargs) [ 811.900574] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 811.900574] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] raise e [ 811.900574] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 811.900574] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] nwinfo = self.network_api.allocate_for_instance( [ 811.900574] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 811.900574] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] created_port_ids = self._update_ports_for_instance( [ 811.900574] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 811.900574] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] with excutils.save_and_reraise_exception(): [ 811.900574] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 811.900917] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] self.force_reraise() [ 811.900917] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 811.900917] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] raise self.value [ 811.900917] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 811.900917] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] updated_port = self._update_port( [ 811.900917] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 811.900917] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] _ensure_no_port_binding_failure(port) [ 811.900917] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
811.900917] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] raise exception.PortBindingFailed(port_id=port['id']) [ 811.900917] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] nova.exception.PortBindingFailed: Binding failed for port fe3b5425-7821-4806-a61c-d0d8e6c0b228, please check neutron logs for more information. [ 811.900917] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] [ 811.900917] env[62096]: INFO nova.compute.manager [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] Terminating instance [ 811.901253] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 788e29f62e84471cb3cf277d4e935ac2 [ 811.902292] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Acquiring lock "refresh_cache-4ce3f48e-e45c-4628-8c35-8493c655a6f3" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 811.902453] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Acquired lock "refresh_cache-4ce3f48e-e45c-4628-8c35-8493c655a6f3" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 811.902618] env[62096]: DEBUG nova.network.neutron [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 811.903141] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 17394b496b9d4132830e9fab3daf4fde in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 811.910695] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 17394b496b9d4132830e9fab3daf4fde [ 812.213600] env[62096]: DEBUG nova.network.neutron [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 812.357890] env[62096]: DEBUG nova.network.neutron [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 812.358478] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 387c08be594e4086ae92e45a2ad91ddd in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 812.366711] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 387c08be594e4086ae92e45a2ad91ddd [ 812.383498] env[62096]: DEBUG nova.network.neutron [-] [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 812.383498] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 8acb4cc4c4b74544aeeb0eb0cc489b84 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 812.391180] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8acb4cc4c4b74544aeeb0eb0cc489b84 [ 812.417813] env[62096]: DEBUG nova.network.neutron [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 812.458954] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94d9133e-4404-4127-b50c-4efdebd12a2b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.467142] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be5c0213-b05a-4603-9d55-582f599fb0e5 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.496941] env[62096]: DEBUG nova.network.neutron [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 812.497459] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg a330d6de1de14f2f823dcbdb7d57aa53 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 812.503904] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7a5796f-a784-43cc-b341-ae1942438672 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.506809] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a330d6de1de14f2f823dcbdb7d57aa53 [ 812.507567] env[62096]: DEBUG oslo_concurrency.lockutils [None 
req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Releasing lock "refresh_cache-4ce3f48e-e45c-4628-8c35-8493c655a6f3" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 812.507948] env[62096]: DEBUG nova.compute.manager [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 812.508157] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 812.508653] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-15238f5c-88ca-4c78-8eb7-ca458cec48ca {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.516372] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd000d74-b3a7-41fc-9e02-568a590b6847 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.524554] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3539d1f-2878-411b-b219-8da57bd85659 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.545068] env[62096]: DEBUG nova.compute.provider_tree [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 812.546458] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Expecting reply to msg 29cb60c3cfc4434483f95191fac2f094 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 812.551054] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4ce3f48e-e45c-4628-8c35-8493c655a6f3 could not be found. 
[ 812.551460] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 812.551743] env[62096]: INFO nova.compute.manager [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] Took 0.04 seconds to destroy the instance on the hypervisor. [ 812.552112] env[62096]: DEBUG oslo.service.loopingcall [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 812.553029] env[62096]: DEBUG nova.compute.manager [-] [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 812.553226] env[62096]: DEBUG nova.network.neutron [-] [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 812.555144] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 29cb60c3cfc4434483f95191fac2f094 [ 812.567480] env[62096]: DEBUG nova.network.neutron [-] [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 812.568918] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c452b13c7136421b93eb59e52ccaa5d3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 812.575134] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c452b13c7136421b93eb59e52ccaa5d3 [ 812.861945] env[62096]: DEBUG oslo_concurrency.lockutils [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Releasing lock "refresh_cache-cffc0ebc-4fb1-47c9-8882-b8431046ef2f" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 812.862187] env[62096]: DEBUG nova.compute.manager [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 812.862373] env[62096]: DEBUG nova.compute.manager [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 812.862540] env[62096]: DEBUG nova.network.neutron [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 812.881668] env[62096]: DEBUG nova.network.neutron [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 812.882403] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 61a8227e5d8146a3aa032b20ddd1465b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 812.890149] env[62096]: INFO nova.compute.manager [-] [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] Took 1.04 seconds to deallocate network for instance. [ 812.890975] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 61a8227e5d8146a3aa032b20ddd1465b [ 812.893786] env[62096]: DEBUG nova.compute.claims [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 812.893978] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 813.053925] env[62096]: DEBUG nova.scheduler.client.report [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 813.056435] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Expecting reply to msg c6067fd2ae55468891a1a9238672b2c6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 813.069784] 
env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c6067fd2ae55468891a1a9238672b2c6 [ 813.071550] env[62096]: DEBUG nova.network.neutron [-] [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 813.071927] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg aba9d8c0c5af47c092a44fabf856b48c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 813.079821] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aba9d8c0c5af47c092a44fabf856b48c [ 813.391252] env[62096]: DEBUG nova.network.neutron [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 813.391774] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 8fcedd53a59541e78119e97c9c4d5d45 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 813.400759] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8fcedd53a59541e78119e97c9c4d5d45 [ 813.537967] env[62096]: DEBUG nova.compute.manager [req-1f109d3d-c058-4946-b5f0-a3245401dc53 req-60003501-07cb-42c5-93d8-d45aeba82da1 service nova] [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] Received event network-changed-fe3b5425-7821-4806-a61c-d0d8e6c0b228 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 813.538185] env[62096]: DEBUG nova.compute.manager [req-1f109d3d-c058-4946-b5f0-a3245401dc53 req-60003501-07cb-42c5-93d8-d45aeba82da1 service nova] [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] Refreshing instance network info cache due to event network-changed-fe3b5425-7821-4806-a61c-d0d8e6c0b228. 
{{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 813.538401] env[62096]: DEBUG oslo_concurrency.lockutils [req-1f109d3d-c058-4946-b5f0-a3245401dc53 req-60003501-07cb-42c5-93d8-d45aeba82da1 service nova] Acquiring lock "refresh_cache-4ce3f48e-e45c-4628-8c35-8493c655a6f3" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 813.538585] env[62096]: DEBUG oslo_concurrency.lockutils [req-1f109d3d-c058-4946-b5f0-a3245401dc53 req-60003501-07cb-42c5-93d8-d45aeba82da1 service nova] Acquired lock "refresh_cache-4ce3f48e-e45c-4628-8c35-8493c655a6f3" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 813.538696] env[62096]: DEBUG nova.network.neutron [req-1f109d3d-c058-4946-b5f0-a3245401dc53 req-60003501-07cb-42c5-93d8-d45aeba82da1 service nova] [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] Refreshing network info cache for port fe3b5425-7821-4806-a61c-d0d8e6c0b228 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 813.539122] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-1f109d3d-c058-4946-b5f0-a3245401dc53 req-60003501-07cb-42c5-93d8-d45aeba82da1 service nova] Expecting reply to msg 0fcf0df3e986493abda39a9d2ad918dc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 813.547512] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0fcf0df3e986493abda39a9d2ad918dc [ 813.558445] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.884s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.559111] env[62096]: ERROR nova.compute.manager [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] [instance: e8631ec4-1823-46d2-8553-05e3336fed32] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 89fd1d75-07b4-4483-9ab1-b63973730b8e, please check neutron logs for more information. 
[ 813.559111] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] Traceback (most recent call last): [ 813.559111] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 813.559111] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] self.driver.spawn(context, instance, image_meta, [ 813.559111] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 813.559111] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] self._vmops.spawn(context, instance, image_meta, injected_files, [ 813.559111] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 813.559111] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] vm_ref = self.build_virtual_machine(instance, [ 813.559111] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 813.559111] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] vif_infos = vmwarevif.get_vif_info(self._session, [ 813.559111] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 813.559476] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] for vif in network_info: [ 813.559476] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 813.559476] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] return self._sync_wrapper(fn, *args, **kwargs) [ 813.559476] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 813.559476] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] self.wait() [ 813.559476] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 813.559476] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] self[:] = self._gt.wait() [ 813.559476] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 813.559476] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] return self._exit_event.wait() [ 813.559476] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 813.559476] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] result = hub.switch() [ 813.559476] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
813.559476] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] return self.greenlet.switch() [ 813.559814] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 813.559814] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] result = function(*args, **kwargs) [ 813.559814] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 813.559814] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] return func(*args, **kwargs) [ 813.559814] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 813.559814] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] raise e [ 813.559814] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 813.559814] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] nwinfo = self.network_api.allocate_for_instance( [ 813.559814] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 813.559814] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] created_port_ids = self._update_ports_for_instance( [ 813.559814] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 813.559814] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] with excutils.save_and_reraise_exception(): [ 813.559814] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 813.560163] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] self.force_reraise() [ 813.560163] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 813.560163] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] raise self.value [ 813.560163] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 813.560163] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] updated_port = self._update_port( [ 813.560163] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 813.560163] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] _ensure_no_port_binding_failure(port) [ 813.560163] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 813.560163] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] raise exception.PortBindingFailed(port_id=port['id']) [ 813.560163] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] nova.exception.PortBindingFailed: Binding failed for port 89fd1d75-07b4-4483-9ab1-b63973730b8e, please check neutron logs for more information. [ 813.560163] env[62096]: ERROR nova.compute.manager [instance: e8631ec4-1823-46d2-8553-05e3336fed32] [ 813.560439] env[62096]: DEBUG nova.compute.utils [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] [instance: e8631ec4-1823-46d2-8553-05e3336fed32] Binding failed for port 89fd1d75-07b4-4483-9ab1-b63973730b8e, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 813.561258] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.737s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 813.562612] env[62096]: INFO nova.compute.claims [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 813.564715] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Expecting reply to msg 2bd5badafa354ea5a5bd53cbf5ccc1d7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 813.572983] env[62096]: DEBUG nova.compute.manager [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] [instance: e8631ec4-1823-46d2-8553-05e3336fed32] Build of instance e8631ec4-1823-46d2-8553-05e3336fed32 was re-scheduled: Binding failed for port 89fd1d75-07b4-4483-9ab1-b63973730b8e, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 813.572983] env[62096]: DEBUG nova.compute.manager [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] [instance: e8631ec4-1823-46d2-8553-05e3336fed32] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 813.572983] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Acquiring lock "refresh_cache-e8631ec4-1823-46d2-8553-05e3336fed32" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 813.572983] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Acquired lock "refresh_cache-e8631ec4-1823-46d2-8553-05e3336fed32" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 813.573219] env[62096]: DEBUG nova.network.neutron [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] [instance: e8631ec4-1823-46d2-8553-05e3336fed32] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 813.573219] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Expecting reply to msg 58094078910a41c7826ec6b888baeded in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 813.573790] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 58094078910a41c7826ec6b888baeded [ 813.580597] env[62096]: INFO nova.compute.manager [-] [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] Took 1.03 seconds to deallocate network for instance. [ 813.587732] env[62096]: DEBUG nova.compute.claims [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 813.587732] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 813.595941] env[62096]: DEBUG nova.network.neutron [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] [instance: e8631ec4-1823-46d2-8553-05e3336fed32] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 813.611357] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2bd5badafa354ea5a5bd53cbf5ccc1d7 [ 813.668541] env[62096]: DEBUG nova.network.neutron [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] [instance: e8631ec4-1823-46d2-8553-05e3336fed32] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 813.669172] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Expecting reply to msg bb77ee92d5df4734a30e40218ce949a6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 813.677731] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bb77ee92d5df4734a30e40218ce949a6 [ 813.894434] env[62096]: INFO nova.compute.manager [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: cffc0ebc-4fb1-47c9-8882-b8431046ef2f] Took 1.03 seconds to deallocate network for instance. [ 813.896161] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 43b405b0f6034705b22f97ac33d8c654 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 813.930214] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 43b405b0f6034705b22f97ac33d8c654 [ 814.070869] env[62096]: DEBUG nova.network.neutron [req-1f109d3d-c058-4946-b5f0-a3245401dc53 req-60003501-07cb-42c5-93d8-d45aeba82da1 service nova] [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 814.076201] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Expecting reply to msg 6f76d7f588dc4e029a625c8b3f364831 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 814.084776] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6f76d7f588dc4e029a625c8b3f364831 [ 814.171827] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Releasing lock "refresh_cache-e8631ec4-1823-46d2-8553-05e3336fed32" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 814.172098] env[62096]: DEBUG nova.compute.manager [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 814.172276] env[62096]: DEBUG nova.compute.manager [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] [instance: e8631ec4-1823-46d2-8553-05e3336fed32] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 814.172433] env[62096]: DEBUG nova.network.neutron [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] [instance: e8631ec4-1823-46d2-8553-05e3336fed32] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 814.188166] env[62096]: DEBUG nova.network.neutron [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] [instance: e8631ec4-1823-46d2-8553-05e3336fed32] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 814.188723] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Expecting reply to msg 14cb72d9978d40108c8350b2d0ddde0e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 814.196482] env[62096]: DEBUG nova.network.neutron [req-1f109d3d-c058-4946-b5f0-a3245401dc53 req-60003501-07cb-42c5-93d8-d45aeba82da1 service nova] [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.196936] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-1f109d3d-c058-4946-b5f0-a3245401dc53 req-60003501-07cb-42c5-93d8-d45aeba82da1 service nova] Expecting reply to msg b243842440ab4e78a3865dacf49f4563 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 814.198215] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 14cb72d9978d40108c8350b2d0ddde0e [ 814.206235] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b243842440ab4e78a3865dacf49f4563 [ 814.411357] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 2eaaa25affcf4f92aeecc2f18d55d71a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 814.446440] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2eaaa25affcf4f92aeecc2f18d55d71a [ 814.694864] env[62096]: DEBUG nova.network.neutron [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] [instance: e8631ec4-1823-46d2-8553-05e3336fed32] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.695434] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Expecting reply to msg e933b83d5c2a4bab9c8edd0fbb2e45a1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 814.699064] env[62096]: DEBUG 
oslo_concurrency.lockutils [req-1f109d3d-c058-4946-b5f0-a3245401dc53 req-60003501-07cb-42c5-93d8-d45aeba82da1 service nova] Releasing lock "refresh_cache-4ce3f48e-e45c-4628-8c35-8493c655a6f3" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 814.699293] env[62096]: DEBUG nova.compute.manager [req-1f109d3d-c058-4946-b5f0-a3245401dc53 req-60003501-07cb-42c5-93d8-d45aeba82da1 service nova] [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] Received event network-vif-deleted-fe3b5425-7821-4806-a61c-d0d8e6c0b228 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 814.703322] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e933b83d5c2a4bab9c8edd0fbb2e45a1 [ 814.836853] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b074bc09-9c33-46ea-bea0-0adeb295d550 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.845227] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c84bf382-5e02-41a1-8562-676395f549bd {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.873240] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14e66c06-376e-4bf1-9e24-20302300aa57 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.879937] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f1f6fd9-ea01-48cf-8fed-20de4d013805 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.892390] env[62096]: DEBUG nova.compute.provider_tree [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 814.892879] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Expecting reply to msg 74ec03e23ede434d81329b086dd8fc32 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 814.900028] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 74ec03e23ede434d81329b086dd8fc32 [ 814.947832] env[62096]: INFO nova.scheduler.client.report [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Deleted allocations for instance cffc0ebc-4fb1-47c9-8882-b8431046ef2f [ 814.953404] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 7f8fe1510a0c42b2a206ebecb4f64681 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 814.971530] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7f8fe1510a0c42b2a206ebecb4f64681 [ 815.198188] env[62096]: INFO nova.compute.manager [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 
tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] [instance: e8631ec4-1823-46d2-8553-05e3336fed32] Took 1.03 seconds to deallocate network for instance. [ 815.199829] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Expecting reply to msg 2a8408a7194d419485884642e69e7ca5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 815.231491] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2a8408a7194d419485884642e69e7ca5 [ 815.395576] env[62096]: DEBUG nova.scheduler.client.report [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 815.398201] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Expecting reply to msg b433fa80f63f46658ed1c2763071f9e6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 815.409807] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b433fa80f63f46658ed1c2763071f9e6 [ 815.461530] env[62096]: DEBUG oslo_concurrency.lockutils [None req-143f4a25-9058-4c14-8b18-575eda4d5e74 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Lock "cffc0ebc-4fb1-47c9-8882-b8431046ef2f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 168.675s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 815.462219] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg 5a7a1c9512bf4778af678a59afd66e0f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 815.472906] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5a7a1c9512bf4778af678a59afd66e0f [ 815.704741] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Expecting reply to msg e590238abb84416c8f747c2e1b917afc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 815.737181] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e590238abb84416c8f747c2e1b917afc [ 815.900714] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.339s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 815.901247] env[62096]: DEBUG nova.compute.manager [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] Start building networks asynchronously for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 815.902999] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Expecting reply to msg 92ee3616b8784f8bbe3cad7f740cc6f7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 815.904058] env[62096]: DEBUG oslo_concurrency.lockutils [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.969s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 815.905846] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Expecting reply to msg 76e65445130d42dfa1e3f4d449c0ba4e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 815.944846] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 92ee3616b8784f8bbe3cad7f740cc6f7 [ 815.963146] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 76e65445130d42dfa1e3f4d449c0ba4e [ 815.964955] env[62096]: DEBUG nova.compute.manager [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] Starting instance... 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 815.966462] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg f055c6d0fd8a4e78bc15244cf37e580d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 815.997915] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f055c6d0fd8a4e78bc15244cf37e580d [ 816.224139] env[62096]: INFO nova.scheduler.client.report [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Deleted allocations for instance e8631ec4-1823-46d2-8553-05e3336fed32 [ 816.229553] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Expecting reply to msg 24afa55990bb446db1ffb1cbdd6cd31d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 816.242324] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 24afa55990bb446db1ffb1cbdd6cd31d [ 816.408623] env[62096]: DEBUG nova.compute.utils [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 816.409352] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Expecting reply to msg 86a7056c83ca4feea702f36c3571bd05 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 816.413311] env[62096]: DEBUG nova.compute.manager [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 816.413483] env[62096]: DEBUG nova.network.neutron [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 816.421932] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 86a7056c83ca4feea702f36c3571bd05 [ 816.477632] env[62096]: DEBUG nova.policy [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '014c8939946c43ea8f6fac72c8b9a227', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7c45455e2bf44bf0a2cef07c43ffe6cd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 816.482843] env[62096]: DEBUG oslo_concurrency.lockutils [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 816.514407] env[62096]: DEBUG oslo_concurrency.lockutils [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Acquiring lock "60bc5e14-f495-48da-8db0-54d75b523822" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 816.514920] env[62096]: DEBUG oslo_concurrency.lockutils [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Lock "60bc5e14-f495-48da-8db0-54d75b523822" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 816.653584] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b705a7a-1a39-4060-b463-d13cfcbe1c2f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.661056] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e27d81f-2279-44be-b7ee-055282a345bf {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.691706] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c7d6aec-cdcf-4854-a656-3f01ab556958 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.698904] env[62096]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61fa95ec-fff6-4c67-b08c-838f461501bf {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.716024] env[62096]: DEBUG nova.compute.provider_tree [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 816.716726] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Expecting reply to msg 391ea0e557144759b632ed07bd808fbc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 816.724179] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 391ea0e557144759b632ed07bd808fbc [ 816.731832] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a3983baa-43a6-408d-8a39-88c7bf1db267 tempest-TenantUsagesTestJSON-1395762051 tempest-TenantUsagesTestJSON-1395762051-project-member] Lock "e8631ec4-1823-46d2-8553-05e3336fed32" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 158.591s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 816.732583] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg 79eb0a599e8a4e89902f68f6f3e5d47a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 816.742468] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 79eb0a599e8a4e89902f68f6f3e5d47a [ 816.802539] env[62096]: DEBUG nova.network.neutron [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] Successfully created port: 3e510cfe-9ee5-4953-a188-b59cb01a93c4 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 816.916628] env[62096]: DEBUG nova.compute.manager [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 816.916628] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Expecting reply to msg be819313107d4d12b7d64948cff2477c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 816.948106] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg be819313107d4d12b7d64948cff2477c [ 817.219786] env[62096]: DEBUG nova.scheduler.client.report [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 817.222201] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Expecting reply to msg 73c6e0b1a75648b497555c9b4519a69b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 817.234943] env[62096]: DEBUG nova.compute.manager [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] Starting instance... 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 817.236713] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg 10e8535e1eda4a798d9c060ce93441d1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 817.238415] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 73c6e0b1a75648b497555c9b4519a69b [ 817.269498] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 10e8535e1eda4a798d9c060ce93441d1 [ 817.427631] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Expecting reply to msg d0cad139828c483ea4cadccb899ec7c7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 817.460592] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d0cad139828c483ea4cadccb899ec7c7 [ 817.725207] env[62096]: DEBUG oslo_concurrency.lockutils [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.821s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 817.725846] env[62096]: ERROR nova.compute.manager [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f050f0de-dc84-4825-b490-eafe522354cd] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port fc98466e-f50e-479f-8ee2-97a380fef60d, please check neutron logs for more information. 
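The ERROR record above, and the traceback that follows it, show nova.network.neutron raising PortBindingFailed for port fc98466e-f50e-479f-8ee2-97a380fef60d via _ensure_no_port_binding_failure(). As a minimal standalone sketch of that guard, assuming Neutron's convention of flagging an unbindable port with binding:vif_type set to 'binding_failed', it would look roughly like the snippet below; the exception class and constant name here are illustrative stand-ins, not the Nova originals.

    # Minimal sketch of the port-binding guard seen in the traceback below.
    # The exception class and VIF_TYPE_BINDING_FAILED constant are
    # illustrative stand-ins, not the Nova originals.

    VIF_TYPE_BINDING_FAILED = 'binding_failed'


    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                'Binding failed for port %s, please check neutron logs '
                'for more information.' % port_id)


    def ensure_no_port_binding_failure(port):
        # Neutron reports a port it could not bind by setting
        # binding:vif_type to 'binding_failed' on the returned port.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])


    if __name__ == '__main__':
        bad_port = {'id': 'fc98466e-f50e-479f-8ee2-97a380fef60d',
                    'binding:vif_type': 'binding_failed'}
        try:
            ensure_no_port_binding_failure(bad_port)
        except PortBindingFailed as exc:
            print(exc)   # mirrors the message logged above

The same failure repeats further down for ports 3e510cfe-9ee5-4953-a188-b59cb01a93c4 and 956e7432-5428-47af-a76b-6187f602f8ba, which is why those instances are torn down and re-scheduled in the records that follow.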
[ 817.725846] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] Traceback (most recent call last): [ 817.725846] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 817.725846] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] self.driver.spawn(context, instance, image_meta, [ 817.725846] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 817.725846] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 817.725846] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 817.725846] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] vm_ref = self.build_virtual_machine(instance, [ 817.725846] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 817.725846] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] vif_infos = vmwarevif.get_vif_info(self._session, [ 817.725846] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 817.726160] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] for vif in network_info: [ 817.726160] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 817.726160] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] return self._sync_wrapper(fn, *args, **kwargs) [ 817.726160] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 817.726160] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] self.wait() [ 817.726160] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 817.726160] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] self[:] = self._gt.wait() [ 817.726160] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 817.726160] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] return self._exit_event.wait() [ 817.726160] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 817.726160] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] result = hub.switch() [ 817.726160] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
817.726160] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] return self.greenlet.switch() [ 817.726490] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 817.726490] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] result = function(*args, **kwargs) [ 817.726490] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 817.726490] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] return func(*args, **kwargs) [ 817.726490] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 817.726490] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] raise e [ 817.726490] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 817.726490] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] nwinfo = self.network_api.allocate_for_instance( [ 817.726490] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 817.726490] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] created_port_ids = self._update_ports_for_instance( [ 817.726490] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 817.726490] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] with excutils.save_and_reraise_exception(): [ 817.726490] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 817.726815] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] self.force_reraise() [ 817.726815] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 817.726815] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] raise self.value [ 817.726815] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 817.726815] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] updated_port = self._update_port( [ 817.726815] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 817.726815] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] _ensure_no_port_binding_failure(port) [ 817.726815] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 817.726815] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] raise exception.PortBindingFailed(port_id=port['id']) [ 817.726815] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] nova.exception.PortBindingFailed: Binding failed for port fc98466e-f50e-479f-8ee2-97a380fef60d, please check neutron logs for more information. [ 817.726815] env[62096]: ERROR nova.compute.manager [instance: f050f0de-dc84-4825-b490-eafe522354cd] [ 817.727100] env[62096]: DEBUG nova.compute.utils [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f050f0de-dc84-4825-b490-eafe522354cd] Binding failed for port fc98466e-f50e-479f-8ee2-97a380fef60d, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 817.727813] env[62096]: DEBUG oslo_concurrency.lockutils [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.901s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 817.729687] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Expecting reply to msg 29a8b99dd32d4f15a65ccda5a5064e53 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 817.731485] env[62096]: DEBUG nova.compute.manager [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f050f0de-dc84-4825-b490-eafe522354cd] Build of instance f050f0de-dc84-4825-b490-eafe522354cd was re-scheduled: Binding failed for port fc98466e-f50e-479f-8ee2-97a380fef60d, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 817.732082] env[62096]: DEBUG nova.compute.manager [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f050f0de-dc84-4825-b490-eafe522354cd] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 817.732308] env[62096]: DEBUG oslo_concurrency.lockutils [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Acquiring lock "refresh_cache-f050f0de-dc84-4825-b490-eafe522354cd" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 817.732454] env[62096]: DEBUG oslo_concurrency.lockutils [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Acquired lock "refresh_cache-f050f0de-dc84-4825-b490-eafe522354cd" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 817.732612] env[62096]: DEBUG nova.network.neutron [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f050f0de-dc84-4825-b490-eafe522354cd] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 817.732970] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Expecting reply to msg 6f8a9f276f0548df8fa2913824590fc7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 817.739334] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6f8a9f276f0548df8fa2913824590fc7 [ 817.757704] env[62096]: DEBUG oslo_concurrency.lockutils [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 817.764640] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 29a8b99dd32d4f15a65ccda5a5064e53 [ 817.931056] env[62096]: DEBUG nova.compute.manager [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 817.954680] env[62096]: DEBUG nova.compute.manager [req-12af6310-9c5e-45a2-b3e4-9aae4942c7e5 req-948b7fd8-42ee-4fb6-be46-a4eaae619a97 service nova] [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] Received event network-changed-3e510cfe-9ee5-4953-a188-b59cb01a93c4 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 817.954680] env[62096]: DEBUG nova.compute.manager [req-12af6310-9c5e-45a2-b3e4-9aae4942c7e5 req-948b7fd8-42ee-4fb6-be46-a4eaae619a97 service nova] [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] Refreshing instance network info cache due to event network-changed-3e510cfe-9ee5-4953-a188-b59cb01a93c4. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 817.954680] env[62096]: DEBUG oslo_concurrency.lockutils [req-12af6310-9c5e-45a2-b3e4-9aae4942c7e5 req-948b7fd8-42ee-4fb6-be46-a4eaae619a97 service nova] Acquiring lock "refresh_cache-7a85b800-725c-4d91-90bd-2056eb2fb116" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 817.954680] env[62096]: DEBUG oslo_concurrency.lockutils [req-12af6310-9c5e-45a2-b3e4-9aae4942c7e5 req-948b7fd8-42ee-4fb6-be46-a4eaae619a97 service nova] Acquired lock "refresh_cache-7a85b800-725c-4d91-90bd-2056eb2fb116" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 817.954680] env[62096]: DEBUG nova.network.neutron [req-12af6310-9c5e-45a2-b3e4-9aae4942c7e5 req-948b7fd8-42ee-4fb6-be46-a4eaae619a97 service nova] [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] Refreshing network info cache for port 3e510cfe-9ee5-4953-a188-b59cb01a93c4 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 817.954860] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-12af6310-9c5e-45a2-b3e4-9aae4942c7e5 req-948b7fd8-42ee-4fb6-be46-a4eaae619a97 service nova] Expecting reply to msg a10cf701ac694cc5bcf07291d1c6bc07 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 817.957722] env[62096]: DEBUG nova.virt.hardware [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 817.957988] env[62096]: DEBUG nova.virt.hardware [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 817.958093] env[62096]: 
DEBUG nova.virt.hardware [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 817.958345] env[62096]: DEBUG nova.virt.hardware [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 817.958418] env[62096]: DEBUG nova.virt.hardware [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 817.958544] env[62096]: DEBUG nova.virt.hardware [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 817.958752] env[62096]: DEBUG nova.virt.hardware [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 817.958905] env[62096]: DEBUG nova.virt.hardware [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 817.959083] env[62096]: DEBUG nova.virt.hardware [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 817.959231] env[62096]: DEBUG nova.virt.hardware [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 817.959400] env[62096]: DEBUG nova.virt.hardware [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 817.960553] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c292e785-8bfa-4146-a058-e13218f35836 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.963785] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a10cf701ac694cc5bcf07291d1c6bc07 [ 
817.971016] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0bd2114-066f-4848-aaed-39257ce99c05 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.303275] env[62096]: DEBUG nova.network.neutron [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f050f0de-dc84-4825-b490-eafe522354cd] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 818.381865] env[62096]: ERROR nova.compute.manager [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 3e510cfe-9ee5-4953-a188-b59cb01a93c4, please check neutron logs for more information. [ 818.381865] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 818.381865] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 818.381865] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 818.381865] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 818.381865] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 818.381865] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 818.381865] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 818.381865] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 818.381865] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 818.381865] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 818.381865] env[62096]: ERROR nova.compute.manager raise self.value [ 818.381865] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 818.381865] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 818.381865] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 818.381865] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 818.382346] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 818.382346] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 818.382346] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 3e510cfe-9ee5-4953-a188-b59cb01a93c4, please check neutron logs for more information. 
[ 818.382346] env[62096]: ERROR nova.compute.manager [ 818.382346] env[62096]: Traceback (most recent call last): [ 818.382346] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 818.382346] env[62096]: listener.cb(fileno) [ 818.382346] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 818.382346] env[62096]: result = function(*args, **kwargs) [ 818.382346] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 818.382346] env[62096]: return func(*args, **kwargs) [ 818.382346] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 818.382346] env[62096]: raise e [ 818.382346] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 818.382346] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 818.382346] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 818.382346] env[62096]: created_port_ids = self._update_ports_for_instance( [ 818.382346] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 818.382346] env[62096]: with excutils.save_and_reraise_exception(): [ 818.382346] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 818.382346] env[62096]: self.force_reraise() [ 818.382346] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 818.382346] env[62096]: raise self.value [ 818.382346] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 818.382346] env[62096]: updated_port = self._update_port( [ 818.382346] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 818.382346] env[62096]: _ensure_no_port_binding_failure(port) [ 818.382346] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 818.382346] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 818.383123] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 3e510cfe-9ee5-4953-a188-b59cb01a93c4, please check neutron logs for more information. [ 818.383123] env[62096]: Removing descriptor: 19 [ 818.383123] env[62096]: ERROR nova.compute.manager [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 3e510cfe-9ee5-4953-a188-b59cb01a93c4, please check neutron logs for more information. 
[ 818.383123] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] Traceback (most recent call last): [ 818.383123] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 818.383123] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] yield resources [ 818.383123] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 818.383123] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] self.driver.spawn(context, instance, image_meta, [ 818.383123] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 818.383123] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] self._vmops.spawn(context, instance, image_meta, injected_files, [ 818.383123] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 818.383123] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] vm_ref = self.build_virtual_machine(instance, [ 818.383474] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 818.383474] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] vif_infos = vmwarevif.get_vif_info(self._session, [ 818.383474] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 818.383474] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] for vif in network_info: [ 818.383474] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 818.383474] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] return self._sync_wrapper(fn, *args, **kwargs) [ 818.383474] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 818.383474] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] self.wait() [ 818.383474] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 818.383474] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] self[:] = self._gt.wait() [ 818.383474] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 818.383474] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] return self._exit_event.wait() [ 818.383474] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 818.383905] env[62096]: ERROR 
nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] result = hub.switch() [ 818.383905] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 818.383905] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] return self.greenlet.switch() [ 818.383905] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 818.383905] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] result = function(*args, **kwargs) [ 818.383905] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 818.383905] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] return func(*args, **kwargs) [ 818.383905] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 818.383905] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] raise e [ 818.383905] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 818.383905] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] nwinfo = self.network_api.allocate_for_instance( [ 818.383905] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 818.383905] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] created_port_ids = self._update_ports_for_instance( [ 818.384380] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 818.384380] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] with excutils.save_and_reraise_exception(): [ 818.384380] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 818.384380] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] self.force_reraise() [ 818.384380] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 818.384380] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] raise self.value [ 818.384380] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 818.384380] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] updated_port = self._update_port( [ 818.384380] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 818.384380] 
env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] _ensure_no_port_binding_failure(port) [ 818.384380] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 818.384380] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] raise exception.PortBindingFailed(port_id=port['id']) [ 818.384720] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] nova.exception.PortBindingFailed: Binding failed for port 3e510cfe-9ee5-4953-a188-b59cb01a93c4, please check neutron logs for more information. [ 818.384720] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] [ 818.384720] env[62096]: INFO nova.compute.manager [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] Terminating instance [ 818.385643] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Acquiring lock "refresh_cache-7a85b800-725c-4d91-90bd-2056eb2fb116" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 818.467753] env[62096]: DEBUG nova.network.neutron [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f050f0de-dc84-4825-b490-eafe522354cd] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 818.468314] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Expecting reply to msg 7e979c3877a746169a46f3ae8837e3c1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 818.475007] env[62096]: DEBUG nova.network.neutron [req-12af6310-9c5e-45a2-b3e4-9aae4942c7e5 req-948b7fd8-42ee-4fb6-be46-a4eaae619a97 service nova] [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 818.477300] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7e979c3877a746169a46f3ae8837e3c1 [ 818.501270] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f1b6ece-0f3d-4465-91d2-1db7ba8564a7 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.509180] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b276582-2cb9-4103-aaaf-919563800255 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.540750] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2da91248-45cb-4922-8785-4c99f0e88388 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.547905] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbf72652-a8da-4325-b2c4-2154e1f88264 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.562019] env[62096]: DEBUG nova.compute.provider_tree [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 818.562549] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Expecting reply to msg 429f6a4175d744639812fa5365fd609c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 818.569771] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 429f6a4175d744639812fa5365fd609c [ 818.580775] env[62096]: DEBUG nova.network.neutron [req-12af6310-9c5e-45a2-b3e4-9aae4942c7e5 req-948b7fd8-42ee-4fb6-be46-a4eaae619a97 service nova] [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 818.581550] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-12af6310-9c5e-45a2-b3e4-9aae4942c7e5 req-948b7fd8-42ee-4fb6-be46-a4eaae619a97 service nova] Expecting reply to msg 9453ab97fcec43c0a3090ac7806a6293 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 818.590633] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9453ab97fcec43c0a3090ac7806a6293 [ 818.971153] env[62096]: DEBUG oslo_concurrency.lockutils [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Releasing lock "refresh_cache-f050f0de-dc84-4825-b490-eafe522354cd" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 818.971450] env[62096]: DEBUG nova.compute.manager [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if 
VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 818.971693] env[62096]: DEBUG nova.compute.manager [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f050f0de-dc84-4825-b490-eafe522354cd] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 818.971933] env[62096]: DEBUG nova.network.neutron [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f050f0de-dc84-4825-b490-eafe522354cd] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 818.988637] env[62096]: DEBUG nova.network.neutron [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f050f0de-dc84-4825-b490-eafe522354cd] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 818.989291] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Expecting reply to msg 8f46787f8c914c459d2e82504f7df5a1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 819.003666] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8f46787f8c914c459d2e82504f7df5a1 [ 819.065778] env[62096]: DEBUG nova.scheduler.client.report [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 819.068207] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Expecting reply to msg 790f87e809074fe7b4439cc170beffa5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 819.079656] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 790f87e809074fe7b4439cc170beffa5 [ 819.083101] env[62096]: DEBUG oslo_concurrency.lockutils [req-12af6310-9c5e-45a2-b3e4-9aae4942c7e5 req-948b7fd8-42ee-4fb6-be46-a4eaae619a97 service nova] Releasing lock "refresh_cache-7a85b800-725c-4d91-90bd-2056eb2fb116" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 819.083511] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Acquired lock "refresh_cache-7a85b800-725c-4d91-90bd-2056eb2fb116" {{(pid=62096) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.084093] env[62096]: DEBUG nova.network.neutron [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 819.084331] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Expecting reply to msg 00b76cb174444f87a1419dac6ca237bd in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 819.090499] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 00b76cb174444f87a1419dac6ca237bd [ 819.492106] env[62096]: DEBUG nova.network.neutron [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f050f0de-dc84-4825-b490-eafe522354cd] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 819.492665] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Expecting reply to msg 605a14af5f314d83a501ec917d9085f2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 819.503428] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 605a14af5f314d83a501ec917d9085f2 [ 819.570401] env[62096]: DEBUG oslo_concurrency.lockutils [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.842s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 819.571031] env[62096]: ERROR nova.compute.manager [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 956e7432-5428-47af-a76b-6187f602f8ba, please check neutron logs for more information. 
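The scheduler report record at 819.065778 just above (like the one at 817.219786 earlier) shows the inventory for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 unchanged. As a short worked example, assuming the usual placement capacity rule of (total - reserved) * allocation_ratio, those figures correspond to roughly 192 schedulable VCPU, 196078 MB of RAM and 400 GB of disk; the snippet below is illustrative arithmetic over the logged data, not placement code. The traceback for port 956e7432-5428-47af-a76b-6187f602f8ba continues after it.

    # Worked example: capacity implied by the inventory logged for
    # provider 6eefe13c-ab55-4c03-987f-47a62756c3b3, assuming the
    # standard placement rule capacity = (total - reserved) * allocation_ratio.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: {capacity:g}")
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400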
[ 819.571031] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] Traceback (most recent call last): [ 819.571031] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 819.571031] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] self.driver.spawn(context, instance, image_meta, [ 819.571031] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 819.571031] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 819.571031] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 819.571031] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] vm_ref = self.build_virtual_machine(instance, [ 819.571031] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 819.571031] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] vif_infos = vmwarevif.get_vif_info(self._session, [ 819.571031] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 819.571340] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] for vif in network_info: [ 819.571340] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 819.571340] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] return self._sync_wrapper(fn, *args, **kwargs) [ 819.571340] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 819.571340] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] self.wait() [ 819.571340] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 819.571340] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] self[:] = self._gt.wait() [ 819.571340] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 819.571340] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] return self._exit_event.wait() [ 819.571340] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 819.571340] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] result = hub.switch() [ 819.571340] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
819.571340] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] return self.greenlet.switch() [ 819.571705] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 819.571705] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] result = function(*args, **kwargs) [ 819.571705] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 819.571705] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] return func(*args, **kwargs) [ 819.571705] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 819.571705] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] raise e [ 819.571705] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 819.571705] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] nwinfo = self.network_api.allocate_for_instance( [ 819.571705] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 819.571705] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] created_port_ids = self._update_ports_for_instance( [ 819.571705] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 819.571705] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] with excutils.save_and_reraise_exception(): [ 819.571705] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 819.572102] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] self.force_reraise() [ 819.572102] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 819.572102] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] raise self.value [ 819.572102] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 819.572102] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] updated_port = self._update_port( [ 819.572102] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 819.572102] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] _ensure_no_port_binding_failure(port) [ 819.572102] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 819.572102] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] raise exception.PortBindingFailed(port_id=port['id']) [ 819.572102] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] nova.exception.PortBindingFailed: Binding failed for port 956e7432-5428-47af-a76b-6187f602f8ba, please check neutron logs for more information. [ 819.572102] env[62096]: ERROR nova.compute.manager [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] [ 819.572409] env[62096]: DEBUG nova.compute.utils [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] Binding failed for port 956e7432-5428-47af-a76b-6187f602f8ba, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 819.572910] env[62096]: DEBUG oslo_concurrency.lockutils [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.218s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 819.574762] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Expecting reply to msg c7627dfa5b0d4225991794c42c4d7e78 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 819.575921] env[62096]: DEBUG nova.compute.manager [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] Build of instance 3283ae52-1a1f-4dec-91f3-44cc42361bb5 was re-scheduled: Binding failed for port 956e7432-5428-47af-a76b-6187f602f8ba, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 819.576387] env[62096]: DEBUG nova.compute.manager [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 819.576618] env[62096]: DEBUG oslo_concurrency.lockutils [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Acquiring lock "refresh_cache-3283ae52-1a1f-4dec-91f3-44cc42361bb5" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 819.576762] env[62096]: DEBUG oslo_concurrency.lockutils [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Acquired lock "refresh_cache-3283ae52-1a1f-4dec-91f3-44cc42361bb5" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.576919] env[62096]: DEBUG nova.network.neutron [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 819.577279] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Expecting reply to msg 6dcff8c9b9c44138847e8ab54144efd1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 819.583133] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6dcff8c9b9c44138847e8ab54144efd1 [ 819.603885] env[62096]: DEBUG nova.network.neutron [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 819.612189] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c7627dfa5b0d4225991794c42c4d7e78 [ 819.686596] env[62096]: DEBUG nova.network.neutron [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 819.687136] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Expecting reply to msg ad708a572f8b4ee98398c8de135a9213 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 819.695312] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ad708a572f8b4ee98398c8de135a9213 [ 819.983592] env[62096]: DEBUG nova.compute.manager [req-2c616e81-875b-4bba-8800-0bef0c969060 req-e983c2b5-cbbc-48f5-99ae-e10ddc8cf798 service nova] [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] Received event network-vif-deleted-3e510cfe-9ee5-4953-a188-b59cb01a93c4 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 819.997016] env[62096]: INFO nova.compute.manager [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f050f0de-dc84-4825-b490-eafe522354cd] Took 1.02 seconds to deallocate network for instance. [ 819.999749] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Expecting reply to msg c7fa5d2e2654401ab97162041dfa9e80 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 820.050086] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c7fa5d2e2654401ab97162041dfa9e80 [ 820.128580] env[62096]: DEBUG nova.network.neutron [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 820.189438] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Releasing lock "refresh_cache-7a85b800-725c-4d91-90bd-2056eb2fb116" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 820.189825] env[62096]: DEBUG nova.compute.manager [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 820.190016] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 820.190302] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7c6dc131-a754-4581-bf81-4e1e035b8612 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.202578] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf8b8493-fa3d-46a0-840b-f48fb28cd9c4 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.221866] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7a85b800-725c-4d91-90bd-2056eb2fb116 could not be found. [ 820.222088] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 820.222263] env[62096]: INFO nova.compute.manager [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] Took 0.03 seconds to destroy the instance on the hypervisor. [ 820.222501] env[62096]: DEBUG oslo.service.loopingcall [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 820.225058] env[62096]: DEBUG nova.compute.manager [-] [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 820.225169] env[62096]: DEBUG nova.network.neutron [-] [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 820.227653] env[62096]: DEBUG nova.network.neutron [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.228240] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Expecting reply to msg 89511c35938746579d33146d3a988f3b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 820.236333] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 89511c35938746579d33146d3a988f3b [ 820.247412] env[62096]: DEBUG nova.network.neutron [-] [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 820.247878] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 8bdad788412548adadd4b544d6c1972e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 820.254750] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8bdad788412548adadd4b544d6c1972e [ 820.403280] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd27b7d2-220f-4416-8c8b-2b7055b0f91e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.411082] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-144341bb-b76e-49ec-88c1-65bb4434e4c2 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.441151] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-650970c6-b51a-4609-8f58-b4671fb74e4a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.448404] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2107a6e7-5545-41b2-b65c-3e8193658670 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.462539] env[62096]: DEBUG nova.compute.provider_tree [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 820.463048] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 
tempest-ServerAddressesTestJSON-1842394723-project-member] Expecting reply to msg 626e4086d31f461d94193d58f45bc99e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 820.483353] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 626e4086d31f461d94193d58f45bc99e [ 820.504624] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Expecting reply to msg 18b9ec2f319946f48a9d02f0fa4cc50f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 820.541214] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 18b9ec2f319946f48a9d02f0fa4cc50f [ 820.731106] env[62096]: DEBUG oslo_concurrency.lockutils [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Releasing lock "refresh_cache-3283ae52-1a1f-4dec-91f3-44cc42361bb5" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 820.731314] env[62096]: DEBUG nova.compute.manager [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 820.731500] env[62096]: DEBUG nova.compute.manager [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 820.731667] env[62096]: DEBUG nova.network.neutron [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 820.746379] env[62096]: DEBUG nova.network.neutron [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 820.746957] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Expecting reply to msg 8e62e5c06ba349879443676eb87a8688 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 820.749447] env[62096]: DEBUG nova.network.neutron [-] [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.749804] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg b1d865bf726c4425ac2991e90f48fd02 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 820.753207] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8e62e5c06ba349879443676eb87a8688 [ 820.762394] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b1d865bf726c4425ac2991e90f48fd02 [ 820.968113] env[62096]: DEBUG nova.scheduler.client.report [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 820.968304] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Expecting reply to msg 955133d02f314d3eb05c2d9677f3c622 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 820.983392] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 955133d02f314d3eb05c2d9677f3c622 [ 821.024361] env[62096]: INFO nova.scheduler.client.report [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Deleted allocations for instance f050f0de-dc84-4825-b490-eafe522354cd [ 821.030128] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Expecting reply to msg e7aeb4bee893446f9cc74b2959505120 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 821.046678] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e7aeb4bee893446f9cc74b2959505120 [ 821.249177] env[62096]: DEBUG nova.network.neutron [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 821.249759] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 
tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Expecting reply to msg 754390cbf0d14ce19cce0546678b9fe4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 821.251110] env[62096]: INFO nova.compute.manager [-] [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] Took 1.03 seconds to deallocate network for instance. [ 821.253250] env[62096]: DEBUG nova.compute.claims [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 821.253454] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 821.258066] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 754390cbf0d14ce19cce0546678b9fe4 [ 821.471060] env[62096]: DEBUG oslo_concurrency.lockutils [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.898s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 821.471907] env[62096]: ERROR nova.compute.manager [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 0be010ec-25ec-41be-b4fc-96b43c921160, please check neutron logs for more information. 
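The oslo_concurrency.lockutils entries above (for example the "compute_resources" lock released by nova.compute.resource_tracker.ResourceTracker.abort_instance_claim after being held 1.898s) are emitted by a lock wrapper that times how long each caller waited for and then held a named lock. A minimal sketch of how such a guarded section can be declared with the public oslo.concurrency API follows; the 'nova-' prefix matches Nova's convention, but the function name and body are illustrative assumptions, not Nova source.

    from oslo_concurrency import lockutils

    # Nova binds a 'nova-' prefix to its named locks; the same convention is used here.
    synchronized = lockutils.synchronized_with_prefix('nova-')

    @synchronized('compute_resources')
    def abort_instance_claim(instance_uuid):
        # The body runs only while the shared "compute_resources" lock is held;
        # the wrapper around it is what logs "acquired ... waited Ns" and
        # "released ... held Ns", as seen in the entries above.
        print('aborting resource claim for %s' % instance_uuid)

    abort_instance_claim('c928bf83-9517-449a-854c-6f3d8ce4faa0')
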
[ 821.471907] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] Traceback (most recent call last): [ 821.471907] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 821.471907] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] self.driver.spawn(context, instance, image_meta, [ 821.471907] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 821.471907] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 821.471907] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 821.471907] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] vm_ref = self.build_virtual_machine(instance, [ 821.471907] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 821.471907] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] vif_infos = vmwarevif.get_vif_info(self._session, [ 821.471907] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 821.472231] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] for vif in network_info: [ 821.472231] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 821.472231] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] return self._sync_wrapper(fn, *args, **kwargs) [ 821.472231] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 821.472231] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] self.wait() [ 821.472231] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 821.472231] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] self[:] = self._gt.wait() [ 821.472231] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 821.472231] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] return self._exit_event.wait() [ 821.472231] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 821.472231] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] result = hub.switch() [ 821.472231] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
821.472231] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] return self.greenlet.switch() [ 821.472558] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 821.472558] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] result = function(*args, **kwargs) [ 821.472558] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 821.472558] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] return func(*args, **kwargs) [ 821.472558] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 821.472558] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] raise e [ 821.472558] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 821.472558] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] nwinfo = self.network_api.allocate_for_instance( [ 821.472558] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 821.472558] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] created_port_ids = self._update_ports_for_instance( [ 821.472558] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 821.472558] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] with excutils.save_and_reraise_exception(): [ 821.472558] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 821.472883] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] self.force_reraise() [ 821.472883] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 821.472883] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] raise self.value [ 821.472883] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 821.472883] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] updated_port = self._update_port( [ 821.472883] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 821.472883] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] _ensure_no_port_binding_failure(port) [ 821.472883] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 821.472883] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] raise exception.PortBindingFailed(port_id=port['id']) [ 821.472883] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] nova.exception.PortBindingFailed: Binding failed for port 0be010ec-25ec-41be-b4fc-96b43c921160, please check neutron logs for more information. [ 821.472883] env[62096]: ERROR nova.compute.manager [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] [ 821.473408] env[62096]: DEBUG nova.compute.utils [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] Binding failed for port 0be010ec-25ec-41be-b4fc-96b43c921160, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 821.474672] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.648s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 821.476621] env[62096]: INFO nova.compute.claims [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 821.478191] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Expecting reply to msg 094729b6088f4573920884b0aac24f0e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 821.479403] env[62096]: DEBUG nova.compute.manager [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] Build of instance c928bf83-9517-449a-854c-6f3d8ce4faa0 was re-scheduled: Binding failed for port 0be010ec-25ec-41be-b4fc-96b43c921160, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 821.479902] env[62096]: DEBUG nova.compute.manager [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 821.480207] env[62096]: DEBUG oslo_concurrency.lockutils [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Acquiring lock "refresh_cache-c928bf83-9517-449a-854c-6f3d8ce4faa0" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 821.480421] env[62096]: DEBUG oslo_concurrency.lockutils [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Acquired lock "refresh_cache-c928bf83-9517-449a-854c-6f3d8ce4faa0" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.480667] env[62096]: DEBUG nova.network.neutron [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 821.481077] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Expecting reply to msg b68f4e234fcc4f63988f6165b935276d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 821.487875] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b68f4e234fcc4f63988f6165b935276d [ 821.512529] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 094729b6088f4573920884b0aac24f0e [ 821.531524] env[62096]: DEBUG oslo_concurrency.lockutils [None req-2fec121e-bb5d-41f2-9f13-5023c9def8ac tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Lock "f050f0de-dc84-4825-b490-eafe522354cd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 160.851s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 821.532302] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 327db18a016b4e3db8bb3e680171d16a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 821.540947] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 327db18a016b4e3db8bb3e680171d16a [ 821.752981] env[62096]: INFO nova.compute.manager [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: 3283ae52-1a1f-4dec-91f3-44cc42361bb5] Took 1.02 seconds to deallocate network for instance. 
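Both PortBindingFailed tracebacks above end in the same place: allocate_for_instance() walks down to _update_port(), which calls _ensure_no_port_binding_failure(port) and raises once Neutron reports a failed binding for the port, after which the build is re-scheduled. A self-contained sketch of that final check is below; it assumes the usual Neutron convention of flagging a failed binding with binding:vif_type set to 'binding_failed', and the classes shown are illustrative stand-ins rather than the actual Nova source.

    # Value Neutron's port binding extension reports when no mechanism could bind the port.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        """Stand-in that mirrors the message format seen in the log above."""

        def __init__(self, port_id):
            super().__init__('Binding failed for port %s, please check neutron '
                             'logs for more information.' % port_id)

    def _ensure_no_port_binding_failure(port):
        # Inspect the port dict returned by the Neutron API after create/update.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # Example: the port from the second traceback, with a failed binding.
    try:
        _ensure_no_port_binding_failure(
            {'id': '0be010ec-25ec-41be-b4fc-96b43c921160',
             'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)
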
[ 821.754790] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Expecting reply to msg 06af174c794b4f2487293d32a717f4dc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 821.785196] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 06af174c794b4f2487293d32a717f4dc [ 821.984101] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Expecting reply to msg ea03cab6ef38405c8fc7be22a51e4ee5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 821.991968] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ea03cab6ef38405c8fc7be22a51e4ee5 [ 822.000233] env[62096]: DEBUG nova.network.neutron [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 822.034795] env[62096]: DEBUG nova.compute.manager [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 822.036556] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 418c966bc9ee42a09bb17c81717fc9cf in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 822.064534] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 418c966bc9ee42a09bb17c81717fc9cf [ 822.097484] env[62096]: DEBUG nova.network.neutron [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.098263] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Expecting reply to msg 668eada5f02c4147a1916122f075e2eb in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 822.106671] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 668eada5f02c4147a1916122f075e2eb [ 822.259136] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Expecting reply to msg b9aa288013754f6a9848ba6181258b72 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 822.292357] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b9aa288013754f6a9848ba6181258b72 [ 822.553855] env[62096]: DEBUG oslo_concurrency.lockutils [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 
tempest-DeleteServersTestJSON-654204352-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 822.602360] env[62096]: DEBUG oslo_concurrency.lockutils [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Releasing lock "refresh_cache-c928bf83-9517-449a-854c-6f3d8ce4faa0" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 822.602577] env[62096]: DEBUG nova.compute.manager [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 822.602752] env[62096]: DEBUG nova.compute.manager [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 822.602912] env[62096]: DEBUG nova.network.neutron [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 822.627710] env[62096]: DEBUG nova.network.neutron [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 822.628299] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Expecting reply to msg cf095b9f41e1450c82199eb42ec1cf63 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 822.634832] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cf095b9f41e1450c82199eb42ec1cf63 [ 822.729956] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1ce05e1-3b88-44da-bc1a-0e9f7bb9a922 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.738203] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fe435ef-8587-485f-a2c7-52a09fd239cc {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.770966] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb258450-b799-4431-aceb-46409b331731 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.778720] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e774e3d-eda6-4557-bf20-f2975b2c981d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.784638] env[62096]: INFO nova.scheduler.client.report [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Deleted allocations for instance 3283ae52-1a1f-4dec-91f3-44cc42361bb5 [ 822.790740] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Expecting reply to msg be93dbc11c6440cb8144e8467afaf76e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 822.799297] env[62096]: DEBUG nova.compute.provider_tree [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 822.800179] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Expecting reply to msg 39dc05aca3c24d36a7cf64799cd16175 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 822.800735] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg be93dbc11c6440cb8144e8467afaf76e [ 822.808917] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 39dc05aca3c24d36a7cf64799cd16175 [ 823.132543] env[62096]: DEBUG nova.network.neutron [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 823.132543] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Expecting reply to msg 0c3bb8293eb74173bf469e3947829fab in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 823.143824] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c3bb8293eb74173bf469e3947829fab [ 823.292339] env[62096]: DEBUG oslo_concurrency.lockutils [None req-35a36660-23cc-4f3d-b53e-6fe46ab70d44 tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Lock "3283ae52-1a1f-4dec-91f3-44cc42361bb5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 153.890s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 823.292928] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Expecting reply to msg 369451e25c7947a89a3f71df2f9a74d8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 823.302942] env[62096]: DEBUG nova.scheduler.client.report [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 823.305396] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Expecting reply to msg d8d449889f7f4f4880952167c1b6cd7c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 823.306523] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 369451e25c7947a89a3f71df2f9a74d8 [ 823.318702] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d8d449889f7f4f4880952167c1b6cd7c [ 823.634565] env[62096]: INFO nova.compute.manager [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] [instance: c928bf83-9517-449a-854c-6f3d8ce4faa0] Took 1.03 seconds to deallocate network for instance. 
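The inventory reported above for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 is a standard Placement inventory record: for each resource class the capacity the scheduler can hand out works out to (total - reserved) * allocation_ratio, with min_unit, max_unit and step_size constraining each individual allocation. The short sketch below only re-derives those figures from the values in the log; the loop itself is illustrative, not Placement code.

    # Inventory exactly as logged for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'min_unit': 1, 'max_unit': 16,
                      'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530,
                      'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'min_unit': 1, 'max_unit': 127,
                      'step_size': 1, 'allocation_ratio': 1.0},
    }

    for resource_class, inv in inventory.items():
        # Effective capacity available to the scheduler for this resource class.
        allocatable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print('%s: %g allocatable (max %s per allocation)'
              % (resource_class, allocatable, inv['max_unit']))
    # Prints 192 VCPU, 196078 MEMORY_MB and 400 DISK_GB, capped at 16/65530/127 per allocation.
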
[ 823.636331] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Expecting reply to msg 28a10abc88b1403c95a8be03cc7d0478 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 823.682418] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 28a10abc88b1403c95a8be03cc7d0478 [ 823.794952] env[62096]: DEBUG nova.compute.manager [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 823.796749] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Expecting reply to msg 5a47c91ca0bf4aaf92d3e775dc961190 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 823.814312] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.340s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 823.814855] env[62096]: DEBUG nova.compute.manager [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 823.816826] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Expecting reply to msg 38132cb61de644c58415465e647a2ed8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 823.818082] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.064s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 823.819462] env[62096]: INFO nova.compute.claims [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 823.821491] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Expecting reply to msg d90bba9c21eb4f8e822c385f8e4af181 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 823.837442] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5a47c91ca0bf4aaf92d3e775dc961190 [ 823.855396] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 38132cb61de644c58415465e647a2ed8 [ 823.856647] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d90bba9c21eb4f8e822c385f8e4af181 [ 824.141050] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Expecting reply to msg 831fbbaa35b7432dbf880a6bb3289699 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 824.187938] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 831fbbaa35b7432dbf880a6bb3289699 [ 824.232315] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Acquiring lock "f832a621-c221-4ae8-928e-d6f9fa6b0586" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 824.232315] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Lock "f832a621-c221-4ae8-928e-d6f9fa6b0586" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 824.317038] env[62096]: DEBUG oslo_concurrency.lockutils [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 824.324452] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Expecting reply to msg c02fd61d96d348c1a2137e438bde8c31 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 824.331582] env[62096]: DEBUG nova.compute.utils [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 824.331582] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Expecting reply to msg 780f9ec681454457bd6818852bfb1566 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 824.331582] env[62096]: DEBUG nova.compute.manager [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 824.331582] env[62096]: DEBUG nova.network.neutron [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 824.333281] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c02fd61d96d348c1a2137e438bde8c31 [ 824.342829] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 780f9ec681454457bd6818852bfb1566 [ 824.417553] env[62096]: DEBUG nova.policy [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '22b67c9741f5435c89dcd9e8ea1911a7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '78667758c2e9405680900f07d5619066', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 824.673215] env[62096]: INFO nova.scheduler.client.report [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Deleted allocations for instance c928bf83-9517-449a-854c-6f3d8ce4faa0 [ 824.692903] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Expecting reply to msg 4cc9133cffce4f6390f394413e598394 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 824.724753] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4cc9133cffce4f6390f394413e598394 [ 
824.833113] env[62096]: DEBUG nova.compute.manager [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] Start building block device mappings for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 824.835091] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Expecting reply to msg 3f5180f7fa5b473cae60294852417d1e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 824.878166] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3f5180f7fa5b473cae60294852417d1e [ 825.010741] env[62096]: DEBUG nova.network.neutron [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] Successfully created port: 29221571-5562-44ee-b638-0096025485b7 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 825.126371] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a181c997-2302-4341-8425-981072aa8935 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.134505] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28c57fda-8fcc-4922-ba81-1dde16ec8dba {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.008345] env[62096]: DEBUG oslo_concurrency.lockutils [None req-534ea4b7-f1b9-4ff7-be62-1798f34a685d tempest-ServerAddressesTestJSON-1842394723 tempest-ServerAddressesTestJSON-1842394723-project-member] Lock "c928bf83-9517-449a-854c-6f3d8ce4faa0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 155.675s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 826.032425] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Expecting reply to msg 99392433f2244690a1ce209e939fa569 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 826.032425] env[62096]: DEBUG nova.network.neutron [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] Successfully created port: ca033090-8a1c-4087-aeba-100ed433cb5a {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 826.032425] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Expecting reply to msg 55a68998197a425fb8077d1076d26458 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 826.032425] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf027b20-7c25-4ef1-b7f6-af5519cbe5b5 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.032425] env[62096]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4770dad-dfd9-41f8-8d35-3b5a93ce9118 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.032425] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 55a68998197a425fb8077d1076d26458 [ 826.046266] env[62096]: DEBUG nova.compute.provider_tree [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 826.046786] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Expecting reply to msg 733660acd94f4238b2765bff2c403d7f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 826.052093] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 99392433f2244690a1ce209e939fa569 [ 826.057496] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 733660acd94f4238b2765bff2c403d7f [ 826.312889] env[62096]: DEBUG oslo_concurrency.lockutils [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Acquiring lock "ce80b353-2f7c-4165-b4fa-b81a1e539295" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.313127] env[62096]: DEBUG oslo_concurrency.lockutils [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Lock "ce80b353-2f7c-4165-b4fa-b81a1e539295" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 826.520712] env[62096]: DEBUG nova.compute.manager [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] Start spawning the instance on the hypervisor. {{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 826.522890] env[62096]: DEBUG nova.compute.manager [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] Starting instance... 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 826.524538] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Expecting reply to msg abbb8e3dafc045f9b62a9f92f507c7f8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 826.549450] env[62096]: DEBUG nova.virt.hardware [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 826.549779] env[62096]: DEBUG nova.virt.hardware [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 826.550020] env[62096]: DEBUG nova.virt.hardware [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 826.550321] env[62096]: DEBUG nova.virt.hardware [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 826.550534] env[62096]: DEBUG nova.virt.hardware [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 826.550735] env[62096]: DEBUG nova.virt.hardware [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 826.550998] env[62096]: DEBUG nova.virt.hardware [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 826.551207] env[62096]: DEBUG nova.virt.hardware [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf 
tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 826.551425] env[62096]: DEBUG nova.virt.hardware [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 826.551638] env[62096]: DEBUG nova.virt.hardware [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 826.551868] env[62096]: DEBUG nova.virt.hardware [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 826.552805] env[62096]: DEBUG nova.scheduler.client.report [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 826.555157] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Expecting reply to msg 8ed515d7ef5940f3a21b6ffb724ca939 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 826.556916] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25502e73-40a5-4c2e-8b12-c2c94ffdc284 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.567359] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78065cf6-4ea5-4265-b95d-d51ccff8be4a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.572113] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8ed515d7ef5940f3a21b6ffb724ca939 [ 826.585359] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg abbb8e3dafc045f9b62a9f92f507c7f8 [ 827.045951] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 827.067991] env[62096]: DEBUG 
oslo_concurrency.lockutils [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.244s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 827.067991] env[62096]: DEBUG nova.compute.manager [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] Start building networks asynchronously for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 827.067991] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Expecting reply to msg f8ce0357253c4337b5fb0e55a7cd1d2d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 827.067991] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.171s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 827.067991] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 4c34f9376d594095b15b1161d0c13d36 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 827.107197] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f8ce0357253c4337b5fb0e55a7cd1d2d [ 827.109077] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4c34f9376d594095b15b1161d0c13d36 [ 827.216825] env[62096]: DEBUG nova.compute.manager [req-61bfe3cf-a40f-4bc5-a896-760bb29791d5 req-42b9f68c-bda4-4ae2-a520-22481317c1c8 service nova] [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] Received event network-changed-29221571-5562-44ee-b638-0096025485b7 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 827.216825] env[62096]: DEBUG nova.compute.manager [req-61bfe3cf-a40f-4bc5-a896-760bb29791d5 req-42b9f68c-bda4-4ae2-a520-22481317c1c8 service nova] [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] Refreshing instance network info cache due to event network-changed-29221571-5562-44ee-b638-0096025485b7. 
{{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 827.216825] env[62096]: DEBUG oslo_concurrency.lockutils [req-61bfe3cf-a40f-4bc5-a896-760bb29791d5 req-42b9f68c-bda4-4ae2-a520-22481317c1c8 service nova] Acquiring lock "refresh_cache-46232e88-ad63-42bc-bf51-2a0758e6ec3a" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 827.216825] env[62096]: DEBUG oslo_concurrency.lockutils [req-61bfe3cf-a40f-4bc5-a896-760bb29791d5 req-42b9f68c-bda4-4ae2-a520-22481317c1c8 service nova] Acquired lock "refresh_cache-46232e88-ad63-42bc-bf51-2a0758e6ec3a" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.216825] env[62096]: DEBUG nova.network.neutron [req-61bfe3cf-a40f-4bc5-a896-760bb29791d5 req-42b9f68c-bda4-4ae2-a520-22481317c1c8 service nova] [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] Refreshing network info cache for port 29221571-5562-44ee-b638-0096025485b7 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 827.217176] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-61bfe3cf-a40f-4bc5-a896-760bb29791d5 req-42b9f68c-bda4-4ae2-a520-22481317c1c8 service nova] Expecting reply to msg 2da3932ed0d74bf28edd917604706827 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 827.223422] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2da3932ed0d74bf28edd917604706827 [ 827.297942] env[62096]: ERROR nova.compute.manager [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 29221571-5562-44ee-b638-0096025485b7, please check neutron logs for more information. 
[ 827.297942] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 827.297942] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 827.297942] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 827.297942] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 827.297942] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 827.297942] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 827.297942] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 827.297942] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 827.297942] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 827.297942] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 827.297942] env[62096]: ERROR nova.compute.manager raise self.value [ 827.297942] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 827.297942] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 827.297942] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 827.297942] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 827.298375] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 827.298375] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 827.298375] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 29221571-5562-44ee-b638-0096025485b7, please check neutron logs for more information. 
[ 827.298375] env[62096]: ERROR nova.compute.manager [ 827.298375] env[62096]: Traceback (most recent call last): [ 827.298375] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 827.298375] env[62096]: listener.cb(fileno) [ 827.298375] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 827.298375] env[62096]: result = function(*args, **kwargs) [ 827.298375] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 827.298375] env[62096]: return func(*args, **kwargs) [ 827.298375] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 827.298375] env[62096]: raise e [ 827.298375] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 827.298375] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 827.298375] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 827.298375] env[62096]: created_port_ids = self._update_ports_for_instance( [ 827.298375] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 827.298375] env[62096]: with excutils.save_and_reraise_exception(): [ 827.298375] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 827.298375] env[62096]: self.force_reraise() [ 827.298375] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 827.298375] env[62096]: raise self.value [ 827.298375] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 827.298375] env[62096]: updated_port = self._update_port( [ 827.298375] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 827.298375] env[62096]: _ensure_no_port_binding_failure(port) [ 827.298375] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 827.298375] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 827.299045] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 29221571-5562-44ee-b638-0096025485b7, please check neutron logs for more information. [ 827.299045] env[62096]: Removing descriptor: 16 [ 827.299045] env[62096]: ERROR nova.compute.manager [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 29221571-5562-44ee-b638-0096025485b7, please check neutron logs for more information. 
[ 827.299045] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] Traceback (most recent call last): [ 827.299045] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 827.299045] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] yield resources [ 827.299045] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 827.299045] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] self.driver.spawn(context, instance, image_meta, [ 827.299045] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 827.299045] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 827.299045] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 827.299045] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] vm_ref = self.build_virtual_machine(instance, [ 827.299341] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 827.299341] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] vif_infos = vmwarevif.get_vif_info(self._session, [ 827.299341] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 827.299341] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] for vif in network_info: [ 827.299341] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 827.299341] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] return self._sync_wrapper(fn, *args, **kwargs) [ 827.299341] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 827.299341] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] self.wait() [ 827.299341] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 827.299341] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] self[:] = self._gt.wait() [ 827.299341] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 827.299341] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] return self._exit_event.wait() [ 827.299341] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 827.299658] env[62096]: ERROR 
nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] result = hub.switch() [ 827.299658] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 827.299658] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] return self.greenlet.switch() [ 827.299658] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 827.299658] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] result = function(*args, **kwargs) [ 827.299658] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 827.299658] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] return func(*args, **kwargs) [ 827.299658] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 827.299658] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] raise e [ 827.299658] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 827.299658] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] nwinfo = self.network_api.allocate_for_instance( [ 827.299658] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 827.299658] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] created_port_ids = self._update_ports_for_instance( [ 827.299986] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 827.299986] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] with excutils.save_and_reraise_exception(): [ 827.299986] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 827.299986] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] self.force_reraise() [ 827.299986] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 827.299986] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] raise self.value [ 827.299986] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 827.299986] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] updated_port = self._update_port( [ 827.299986] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 827.299986] 
env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] _ensure_no_port_binding_failure(port) [ 827.299986] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 827.299986] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] raise exception.PortBindingFailed(port_id=port['id']) [ 827.300362] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] nova.exception.PortBindingFailed: Binding failed for port 29221571-5562-44ee-b638-0096025485b7, please check neutron logs for more information. [ 827.300362] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] [ 827.300362] env[62096]: INFO nova.compute.manager [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] Terminating instance [ 827.301123] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Acquiring lock "refresh_cache-46232e88-ad63-42bc-bf51-2a0758e6ec3a" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 827.581297] env[62096]: DEBUG nova.compute.utils [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 827.581297] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Expecting reply to msg f61948df40f54cbdbc7054e6474d4253 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 827.581297] env[62096]: DEBUG nova.compute.manager [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 827.582465] env[62096]: DEBUG nova.network.neutron [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 827.588889] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f61948df40f54cbdbc7054e6474d4253 [ 827.624163] env[62096]: DEBUG nova.policy [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9a23fbb593a44231b1defd19d4331fa4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9acbce33c9d84647ac290f90ab6c7f18', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 827.822457] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae9ec6f7-703c-4593-9eb9-8f34d3bfd65c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.830328] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c8300c4-fb00-44e7-a9e0-12bc5e04441a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.860594] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2080580-2398-468d-9d9a-955ba0119a74 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.869861] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-840c746c-9c65-42a0-88db-0aaaed4775c3 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.884370] env[62096]: DEBUG nova.compute.provider_tree [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 827.884893] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 4c13a88c4c91403b94c86eafd565d63b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 827.900211] env[62096]: DEBUG nova.network.neutron [req-61bfe3cf-a40f-4bc5-a896-760bb29791d5 req-42b9f68c-bda4-4ae2-a520-22481317c1c8 service nova] [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 827.900211] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4c13a88c4c91403b94c86eafd565d63b [ 828.077315] env[62096]: DEBUG nova.compute.manager [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] Start building block device mappings for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 828.079169] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Expecting reply to msg a0091a69ef954975adfb3f2119fefddf in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 828.121736] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a0091a69ef954975adfb3f2119fefddf [ 828.124701] env[62096]: DEBUG nova.network.neutron [req-61bfe3cf-a40f-4bc5-a896-760bb29791d5 req-42b9f68c-bda4-4ae2-a520-22481317c1c8 service nova] [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 828.125249] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-61bfe3cf-a40f-4bc5-a896-760bb29791d5 req-42b9f68c-bda4-4ae2-a520-22481317c1c8 service nova] Expecting reply to msg 34bdb30d35e4490bad8adcfef49cfdbb in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 828.133254] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 34bdb30d35e4490bad8adcfef49cfdbb [ 828.244955] env[62096]: DEBUG nova.network.neutron [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] Successfully created port: 33af3186-72fb-4c89-981b-e53007b95853 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 828.397055] env[62096]: DEBUG nova.scheduler.client.report [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 828.399312] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 69ebb7978e7547ea8f2e1e3cd059267d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 828.415151] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 69ebb7978e7547ea8f2e1e3cd059267d [ 828.585164] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 
tempest-ServerMetadataTestJSON-1279504626-project-member] Expecting reply to msg 2175a999935143cc80a6bd06687f9950 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 828.619495] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2175a999935143cc80a6bd06687f9950 [ 828.627071] env[62096]: DEBUG oslo_concurrency.lockutils [req-61bfe3cf-a40f-4bc5-a896-760bb29791d5 req-42b9f68c-bda4-4ae2-a520-22481317c1c8 service nova] Releasing lock "refresh_cache-46232e88-ad63-42bc-bf51-2a0758e6ec3a" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 828.627494] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Acquired lock "refresh_cache-46232e88-ad63-42bc-bf51-2a0758e6ec3a" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 828.627686] env[62096]: DEBUG nova.network.neutron [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 828.628125] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Expecting reply to msg 9f4268f6a49343559254b3ef23666c4b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 828.639490] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f4268f6a49343559254b3ef23666c4b [ 828.901754] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.836s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 828.902505] env[62096]: ERROR nova.compute.manager [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 6419be91-62e8-48ce-8284-cedcf93c1574, please check neutron logs for more information. 
[ 828.902505] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] Traceback (most recent call last): [ 828.902505] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 828.902505] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] self.driver.spawn(context, instance, image_meta, [ 828.902505] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 828.902505] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 828.902505] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 828.902505] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] vm_ref = self.build_virtual_machine(instance, [ 828.902505] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 828.902505] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] vif_infos = vmwarevif.get_vif_info(self._session, [ 828.902505] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 828.902980] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] for vif in network_info: [ 828.902980] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 828.902980] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] return self._sync_wrapper(fn, *args, **kwargs) [ 828.902980] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 828.902980] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] self.wait() [ 828.902980] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 828.902980] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] self[:] = self._gt.wait() [ 828.902980] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 828.902980] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] return self._exit_event.wait() [ 828.902980] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 828.902980] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] result = hub.switch() [ 828.902980] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
828.902980] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] return self.greenlet.switch() [ 828.903434] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 828.903434] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] result = function(*args, **kwargs) [ 828.903434] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 828.903434] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] return func(*args, **kwargs) [ 828.903434] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 828.903434] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] raise e [ 828.903434] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 828.903434] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] nwinfo = self.network_api.allocate_for_instance( [ 828.903434] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 828.903434] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] created_port_ids = self._update_ports_for_instance( [ 828.903434] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 828.903434] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] with excutils.save_and_reraise_exception(): [ 828.903434] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 828.903886] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] self.force_reraise() [ 828.903886] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 828.903886] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] raise self.value [ 828.903886] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 828.903886] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] updated_port = self._update_port( [ 828.903886] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 828.903886] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] _ensure_no_port_binding_failure(port) [ 828.903886] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 828.903886] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] raise exception.PortBindingFailed(port_id=port['id']) [ 828.903886] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] nova.exception.PortBindingFailed: Binding failed for port 6419be91-62e8-48ce-8284-cedcf93c1574, please check neutron logs for more information. [ 828.903886] env[62096]: ERROR nova.compute.manager [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] [ 828.904389] env[62096]: DEBUG nova.compute.utils [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] Binding failed for port 6419be91-62e8-48ce-8284-cedcf93c1574, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 828.905289] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.322s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 828.907286] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 83fc12616de548d98627bfcc9b72ba21 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 828.908512] env[62096]: DEBUG nova.compute.manager [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] Build of instance a5b5afc7-abc0-430a-b682-2c1946b4a6d8 was re-scheduled: Binding failed for port 6419be91-62e8-48ce-8284-cedcf93c1574, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 828.912339] env[62096]: DEBUG nova.compute.manager [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 828.912610] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Acquiring lock "refresh_cache-a5b5afc7-abc0-430a-b682-2c1946b4a6d8" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 828.912851] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Acquired lock "refresh_cache-a5b5afc7-abc0-430a-b682-2c1946b4a6d8" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 828.913067] env[62096]: DEBUG nova.network.neutron [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 828.913559] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 880981f8675548a3a1252e7dcc8551f7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 828.919787] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 880981f8675548a3a1252e7dcc8551f7 [ 828.943932] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 83fc12616de548d98627bfcc9b72ba21 [ 829.088928] env[62096]: DEBUG nova.compute.manager [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 829.113623] env[62096]: DEBUG nova.virt.hardware [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 829.113976] env[62096]: DEBUG nova.virt.hardware [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 829.114232] env[62096]: DEBUG nova.virt.hardware [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 829.114462] env[62096]: DEBUG nova.virt.hardware [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 829.114711] env[62096]: DEBUG nova.virt.hardware [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 829.114997] env[62096]: DEBUG nova.virt.hardware [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 829.115264] env[62096]: DEBUG nova.virt.hardware [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 829.115461] env[62096]: DEBUG nova.virt.hardware [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 829.115716] env[62096]: DEBUG nova.virt.hardware [None 
req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 829.115865] env[62096]: DEBUG nova.virt.hardware [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 829.116083] env[62096]: DEBUG nova.virt.hardware [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 829.117010] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c78837f1-64e2-40f3-b6ec-2e6ec8057ba7 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.125720] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48464868-c5c9-47fd-863b-4f6d5ef7ebf4 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.209119] env[62096]: DEBUG nova.network.neutron [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 829.261428] env[62096]: DEBUG nova.compute.manager [req-7310fc17-551c-46c8-9480-74d82210426e req-1be661cb-2671-4157-ac0f-1903fb2a5948 service nova] [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] Received event network-vif-deleted-29221571-5562-44ee-b638-0096025485b7 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 829.445733] env[62096]: DEBUG nova.network.neutron [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 829.468062] env[62096]: DEBUG nova.network.neutron [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.468584] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Expecting reply to msg 4bd6015be5c045f79349c4a64d739728 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 829.481774] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4bd6015be5c045f79349c4a64d739728 [ 829.573887] env[62096]: DEBUG nova.network.neutron [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.574436] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 5d87611395bb4160b86598150bb3c1fe in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 829.585674] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5d87611395bb4160b86598150bb3c1fe [ 829.652077] env[62096]: ERROR nova.compute.manager [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 33af3186-72fb-4c89-981b-e53007b95853, please check neutron logs for more information. 
[ 829.652077] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 829.652077] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 829.652077] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 829.652077] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 829.652077] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 829.652077] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 829.652077] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 829.652077] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 829.652077] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 829.652077] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 829.652077] env[62096]: ERROR nova.compute.manager raise self.value [ 829.652077] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 829.652077] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 829.652077] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 829.652077] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 829.652557] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 829.652557] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 829.652557] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 33af3186-72fb-4c89-981b-e53007b95853, please check neutron logs for more information. 
[ 829.652557] env[62096]: ERROR nova.compute.manager [ 829.652557] env[62096]: Traceback (most recent call last): [ 829.652557] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 829.652557] env[62096]: listener.cb(fileno) [ 829.652557] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 829.652557] env[62096]: result = function(*args, **kwargs) [ 829.652557] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 829.652557] env[62096]: return func(*args, **kwargs) [ 829.652557] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 829.652557] env[62096]: raise e [ 829.652557] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 829.652557] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 829.652557] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 829.652557] env[62096]: created_port_ids = self._update_ports_for_instance( [ 829.652557] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 829.652557] env[62096]: with excutils.save_and_reraise_exception(): [ 829.652557] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 829.652557] env[62096]: self.force_reraise() [ 829.652557] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 829.652557] env[62096]: raise self.value [ 829.652557] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 829.652557] env[62096]: updated_port = self._update_port( [ 829.652557] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 829.652557] env[62096]: _ensure_no_port_binding_failure(port) [ 829.652557] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 829.652557] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 829.653334] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 33af3186-72fb-4c89-981b-e53007b95853, please check neutron logs for more information. [ 829.653334] env[62096]: Removing descriptor: 16 [ 829.653722] env[62096]: ERROR nova.compute.manager [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 33af3186-72fb-4c89-981b-e53007b95853, please check neutron logs for more information. 
[ 829.653722] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] Traceback (most recent call last): [ 829.653722] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 829.653722] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] yield resources [ 829.653722] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 829.653722] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] self.driver.spawn(context, instance, image_meta, [ 829.653722] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 829.653722] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] self._vmops.spawn(context, instance, image_meta, injected_files, [ 829.653722] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 829.653722] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] vm_ref = self.build_virtual_machine(instance, [ 829.653722] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 829.654075] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] vif_infos = vmwarevif.get_vif_info(self._session, [ 829.654075] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 829.654075] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] for vif in network_info: [ 829.654075] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 829.654075] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] return self._sync_wrapper(fn, *args, **kwargs) [ 829.654075] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 829.654075] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] self.wait() [ 829.654075] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 829.654075] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] self[:] = self._gt.wait() [ 829.654075] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 829.654075] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] return self._exit_event.wait() [ 829.654075] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 829.654075] env[62096]: ERROR 
nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] result = hub.switch() [ 829.654409] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 829.654409] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] return self.greenlet.switch() [ 829.654409] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 829.654409] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] result = function(*args, **kwargs) [ 829.654409] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 829.654409] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] return func(*args, **kwargs) [ 829.654409] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 829.654409] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] raise e [ 829.654409] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 829.654409] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] nwinfo = self.network_api.allocate_for_instance( [ 829.654409] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 829.654409] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] created_port_ids = self._update_ports_for_instance( [ 829.654409] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 829.654747] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] with excutils.save_and_reraise_exception(): [ 829.654747] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 829.654747] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] self.force_reraise() [ 829.654747] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 829.654747] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] raise self.value [ 829.654747] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 829.654747] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] updated_port = self._update_port( [ 829.654747] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 829.654747] 
env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] _ensure_no_port_binding_failure(port) [ 829.654747] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 829.654747] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] raise exception.PortBindingFailed(port_id=port['id']) [ 829.654747] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] nova.exception.PortBindingFailed: Binding failed for port 33af3186-72fb-4c89-981b-e53007b95853, please check neutron logs for more information. [ 829.654747] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] [ 829.655089] env[62096]: INFO nova.compute.manager [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] Terminating instance [ 829.659729] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Acquiring lock "refresh_cache-b54dd1f8-2e8d-446d-9145-d034664b7069" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 829.659938] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Acquired lock "refresh_cache-b54dd1f8-2e8d-446d-9145-d034664b7069" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.660166] env[62096]: DEBUG nova.network.neutron [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 829.660622] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Expecting reply to msg 4baad31e8af64afcba79b4e35c3c0ad8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 829.668504] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4baad31e8af64afcba79b4e35c3c0ad8 [ 829.701599] env[62096]: DEBUG nova.network.neutron [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 829.708180] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-660b08e7-5e42-4139-9cd0-97c3e1248907 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.716558] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-931ca5e3-e5fb-4ed3-94da-e0a0a699b743 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.750718] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31927e47-88ad-45bd-bb20-27fbbd51e482 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.758268] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4527f517-85f9-4342-84b0-cbb7227682f8 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.772485] env[62096]: DEBUG nova.compute.provider_tree [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 829.773168] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg edbdaef9cb214000a2a0af55b0b7b978 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 829.782514] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg edbdaef9cb214000a2a0af55b0b7b978 [ 829.825164] env[62096]: DEBUG nova.network.neutron [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.825651] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Expecting reply to msg a0dd70c5d4194937b7a874047f907a99 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 829.834994] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a0dd70c5d4194937b7a874047f907a99 [ 829.975860] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Releasing lock "refresh_cache-46232e88-ad63-42bc-bf51-2a0758e6ec3a" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 829.976344] env[62096]: DEBUG nova.compute.manager [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 829.976551] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 829.976833] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9de5c7aa-b347-4315-bc94-0f6165708bcb {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.987261] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fab0d50f-5fcc-4e5b-a444-90568a6e7621 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.008632] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 46232e88-ad63-42bc-bf51-2a0758e6ec3a could not be found. [ 830.008874] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 830.009117] env[62096]: INFO nova.compute.manager [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] Took 0.03 seconds to destroy the instance on the hypervisor. [ 830.010429] env[62096]: DEBUG oslo.service.loopingcall [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 830.010642] env[62096]: DEBUG nova.compute.manager [-] [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 830.010732] env[62096]: DEBUG nova.network.neutron [-] [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 830.045989] env[62096]: DEBUG nova.network.neutron [-] [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 830.077256] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Releasing lock "refresh_cache-a5b5afc7-abc0-430a-b682-2c1946b4a6d8" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 830.077549] env[62096]: DEBUG nova.compute.manager [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 830.077775] env[62096]: DEBUG nova.compute.manager [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 830.077982] env[62096]: DEBUG nova.network.neutron [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 830.094103] env[62096]: DEBUG nova.network.neutron [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 830.094754] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 44bf5644f56045cb8cdf10f61898df1b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 830.101687] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 44bf5644f56045cb8cdf10f61898df1b [ 830.263872] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 6fe9ed4bae0a41559762e60168e73a14 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 830.275782] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6fe9ed4bae0a41559762e60168e73a14 [ 830.283587] env[62096]: DEBUG nova.scheduler.client.report [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 830.285936] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 1df4fb963fc448e38b44556ea2aeaa3d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 830.297285] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1df4fb963fc448e38b44556ea2aeaa3d [ 830.331207] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Releasing lock "refresh_cache-b54dd1f8-2e8d-446d-9145-d034664b7069" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 830.331612] env[62096]: DEBUG nova.compute.manager [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 830.331797] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 830.332116] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f6ead234-27a3-407e-9d1d-475ff2c3d21d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.340629] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-369480db-0352-4343-acfd-4b303fc38f0f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.362810] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b54dd1f8-2e8d-446d-9145-d034664b7069 could not be found. [ 830.363050] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 830.363271] env[62096]: INFO nova.compute.manager [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] Took 0.03 seconds to destroy the instance on the hypervisor. [ 830.363514] env[62096]: DEBUG oslo.service.loopingcall [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 830.364119] env[62096]: DEBUG nova.compute.manager [-] [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 830.364219] env[62096]: DEBUG nova.network.neutron [-] [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 830.387694] env[62096]: DEBUG nova.network.neutron [-] [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 830.388232] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e8eb004d24b546a8b5fb71a68ab8d49c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 830.395337] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e8eb004d24b546a8b5fb71a68ab8d49c [ 830.596988] env[62096]: DEBUG nova.network.neutron [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.597602] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 865992672cf547dca594b76604bbce08 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 830.605521] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 865992672cf547dca594b76604bbce08 [ 830.770171] env[62096]: DEBUG nova.network.neutron [-] [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.770673] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg d97bfdf0fa49412b990009ee12067319 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 830.779227] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d97bfdf0fa49412b990009ee12067319 [ 830.788170] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.883s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 830.788771] env[62096]: ERROR nova.compute.manager [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port fe3b5425-7821-4806-a61c-d0d8e6c0b228, please check neutron logs for more information. 
[ 830.788771] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] Traceback (most recent call last): [ 830.788771] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 830.788771] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] self.driver.spawn(context, instance, image_meta, [ 830.788771] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 830.788771] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 830.788771] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 830.788771] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] vm_ref = self.build_virtual_machine(instance, [ 830.788771] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 830.788771] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] vif_infos = vmwarevif.get_vif_info(self._session, [ 830.788771] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 830.789125] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] for vif in network_info: [ 830.789125] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 830.789125] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] return self._sync_wrapper(fn, *args, **kwargs) [ 830.789125] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 830.789125] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] self.wait() [ 830.789125] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 830.789125] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] self[:] = self._gt.wait() [ 830.789125] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 830.789125] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] return self._exit_event.wait() [ 830.789125] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 830.789125] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] current.throw(*self._exc) [ 830.789125] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
830.789125] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] result = function(*args, **kwargs) [ 830.789462] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 830.789462] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] return func(*args, **kwargs) [ 830.789462] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 830.789462] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] raise e [ 830.789462] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 830.789462] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] nwinfo = self.network_api.allocate_for_instance( [ 830.789462] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 830.789462] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] created_port_ids = self._update_ports_for_instance( [ 830.789462] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 830.789462] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] with excutils.save_and_reraise_exception(): [ 830.789462] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 830.789462] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] self.force_reraise() [ 830.789462] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 830.789794] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] raise self.value [ 830.789794] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 830.789794] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] updated_port = self._update_port( [ 830.789794] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 830.789794] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] _ensure_no_port_binding_failure(port) [ 830.789794] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 830.789794] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] raise exception.PortBindingFailed(port_id=port['id']) [ 830.789794] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] nova.exception.PortBindingFailed: Binding failed for 
port fe3b5425-7821-4806-a61c-d0d8e6c0b228, please check neutron logs for more information. [ 830.789794] env[62096]: ERROR nova.compute.manager [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] [ 830.789794] env[62096]: DEBUG nova.compute.utils [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] Binding failed for port fe3b5425-7821-4806-a61c-d0d8e6c0b228, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 830.792225] env[62096]: DEBUG nova.compute.manager [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] Build of instance 4ce3f48e-e45c-4628-8c35-8493c655a6f3 was re-scheduled: Binding failed for port fe3b5425-7821-4806-a61c-d0d8e6c0b228, please check neutron logs for more information. {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 830.792225] env[62096]: DEBUG nova.compute.manager [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 830.792454] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Acquiring lock "refresh_cache-4ce3f48e-e45c-4628-8c35-8493c655a6f3" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 830.792583] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Acquired lock "refresh_cache-4ce3f48e-e45c-4628-8c35-8493c655a6f3" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.792676] env[62096]: DEBUG nova.network.neutron [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 830.793176] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 53e34942b3664f2ab2013037e54ed044 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 830.795165] env[62096]: DEBUG oslo_concurrency.lockutils [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.312s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 830.797227] env[62096]: INFO nova.compute.claims [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 
tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 830.798917] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg 6f441e535ce64392b60ee419a51b34de in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 830.800719] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 53e34942b3664f2ab2013037e54ed044 [ 830.838380] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6f441e535ce64392b60ee419a51b34de [ 830.890476] env[62096]: DEBUG nova.network.neutron [-] [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.890858] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 689f400dd4f04308bb5942ac25e24b93 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 830.899715] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 689f400dd4f04308bb5942ac25e24b93 [ 831.099826] env[62096]: INFO nova.compute.manager [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: a5b5afc7-abc0-430a-b682-2c1946b4a6d8] Took 1.02 seconds to deallocate network for instance. [ 831.101624] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 4c6b0638afb54f4eb598280cb004b60b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 831.134645] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4c6b0638afb54f4eb598280cb004b60b [ 831.275054] env[62096]: INFO nova.compute.manager [-] [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] Took 1.26 seconds to deallocate network for instance. 
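The PortBindingFailed tracebacks above all bottom out in the same frame, _ensure_no_port_binding_failure at /opt/stack/nova/nova/network/neutron.py:294, which turns a Neutron port whose binding failed into an exception so that _allocate_network_async (nova/compute/manager.py:1989-2011) re-raises it and the build is aborted. A minimal, self-contained sketch of that guard follows; only the function name, the exception type and its message are taken from the log, while the stand-in exception class and the 'binding_failed' vif_type value are assumptions, not copies of the Nova source.

    # Hedged sketch of the check at nova/network/neutron.py:294 referenced in
    # the tracebacks above; not a copy of the Nova source.
    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed."""

        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs for "
                "more information." % port_id)

    # Assumption: Neutron marks a port it could not bind by setting
    # binding:vif_type to 'binding_failed'.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    def _ensure_no_port_binding_failure(port):
        # Convert a failed binding into an exception so the callers shown in
        # the traceback (_update_port -> _update_ports_for_instance ->
        # allocate_for_instance) can re-raise and abort the instance build.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # Example: the failing port from the log would trip the guard like this.
    try:
        _ensure_no_port_binding_failure(
            {'id': '33af3186-72fb-4c89-981b-e53007b95853',
             'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)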
[ 831.275835] env[62096]: DEBUG nova.compute.claims [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 831.276019] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.293458] env[62096]: DEBUG nova.compute.manager [req-a1a86112-be95-4a7a-9c99-222d59ab6465 req-a5041c04-af4e-463d-8c93-e7ec217d3740 service nova] [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] Received event network-changed-33af3186-72fb-4c89-981b-e53007b95853 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 831.293642] env[62096]: DEBUG nova.compute.manager [req-a1a86112-be95-4a7a-9c99-222d59ab6465 req-a5041c04-af4e-463d-8c93-e7ec217d3740 service nova] [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] Refreshing instance network info cache due to event network-changed-33af3186-72fb-4c89-981b-e53007b95853. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 831.293844] env[62096]: DEBUG oslo_concurrency.lockutils [req-a1a86112-be95-4a7a-9c99-222d59ab6465 req-a5041c04-af4e-463d-8c93-e7ec217d3740 service nova] Acquiring lock "refresh_cache-b54dd1f8-2e8d-446d-9145-d034664b7069" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 831.294008] env[62096]: DEBUG oslo_concurrency.lockutils [req-a1a86112-be95-4a7a-9c99-222d59ab6465 req-a5041c04-af4e-463d-8c93-e7ec217d3740 service nova] Acquired lock "refresh_cache-b54dd1f8-2e8d-446d-9145-d034664b7069" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.294175] env[62096]: DEBUG nova.network.neutron [req-a1a86112-be95-4a7a-9c99-222d59ab6465 req-a5041c04-af4e-463d-8c93-e7ec217d3740 service nova] [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] Refreshing network info cache for port 33af3186-72fb-4c89-981b-e53007b95853 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 831.294618] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-a1a86112-be95-4a7a-9c99-222d59ab6465 req-a5041c04-af4e-463d-8c93-e7ec217d3740 service nova] Expecting reply to msg 0687fd8795f74d06ad977a75cde5a7c1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 831.309364] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg f6716209fd8f4ee9b1bd132e01b4c32c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 831.310721] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0687fd8795f74d06ad977a75cde5a7c1 [ 831.323632] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f6716209fd8f4ee9b1bd132e01b4c32c [ 831.324978] env[62096]: DEBUG nova.network.neutron [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: 
4ce3f48e-e45c-4628-8c35-8493c655a6f3] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 831.393054] env[62096]: INFO nova.compute.manager [-] [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] Took 1.03 seconds to deallocate network for instance. [ 831.395795] env[62096]: DEBUG nova.compute.claims [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 831.396074] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.405169] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Acquiring lock "f3f90842-edaa-42b0-9b21-25a952fc8288" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.405411] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Lock "f3f90842-edaa-42b0-9b21-25a952fc8288" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 831.411634] env[62096]: DEBUG nova.network.neutron [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.412123] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 05e8f11c8bb344be8be7217554808b99 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 831.420135] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 05e8f11c8bb344be8be7217554808b99 [ 831.606566] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg f5ebfd34b541424c80fcf854b29bf8d4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 831.638822] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f5ebfd34b541424c80fcf854b29bf8d4 [ 831.815528] env[62096]: DEBUG nova.network.neutron [req-a1a86112-be95-4a7a-9c99-222d59ab6465 req-a5041c04-af4e-463d-8c93-e7ec217d3740 service nova] [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 831.914391] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Releasing lock "refresh_cache-4ce3f48e-e45c-4628-8c35-8493c655a6f3" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 831.914603] env[62096]: DEBUG nova.compute.manager [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 831.914782] env[62096]: DEBUG nova.compute.manager [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 831.914944] env[62096]: DEBUG nova.network.neutron [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 831.919891] env[62096]: DEBUG nova.network.neutron [req-a1a86112-be95-4a7a-9c99-222d59ab6465 req-a5041c04-af4e-463d-8c93-e7ec217d3740 service nova] [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.920415] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-a1a86112-be95-4a7a-9c99-222d59ab6465 req-a5041c04-af4e-463d-8c93-e7ec217d3740 service nova] Expecting reply to msg edd98c189c6149bbb38868d56dfe9803 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 831.928894] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg edd98c189c6149bbb38868d56dfe9803 [ 831.935160] env[62096]: DEBUG nova.network.neutron [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 831.935751] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg ca929f5b5dd24ae2864f58c6c22c7554 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 831.942479] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ca929f5b5dd24ae2864f58c6c22c7554 [ 832.112872] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e97997fc-2afa-4f3c-9ea9-19a3bff4b700 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.120383] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98430db4-90cd-4805-9571-84de84e55dca {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.153384] env[62096]: INFO nova.scheduler.client.report [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Deleted allocations for instance a5b5afc7-abc0-430a-b682-2c1946b4a6d8 [ 832.159308] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f29546ce-7444-4073-9feb-d74a2fd5e4e4 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.162051] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 7628b70e49ea49acbb012baa6dceef91 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 832.168156] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2f0ce81-614c-43f9-aab8-6e5468dd26e3 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.181770] env[62096]: DEBUG nova.compute.provider_tree [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 832.182258] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg ab3a8ab591b741b0aee5c53c3a36fda7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 832.183293] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7628b70e49ea49acbb012baa6dceef91 [ 832.192229] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ab3a8ab591b741b0aee5c53c3a36fda7 [ 832.423531] env[62096]: DEBUG oslo_concurrency.lockutils [req-a1a86112-be95-4a7a-9c99-222d59ab6465 req-a5041c04-af4e-463d-8c93-e7ec217d3740 service nova] Releasing lock "refresh_cache-b54dd1f8-2e8d-446d-9145-d034664b7069" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 832.423731] env[62096]: DEBUG 
nova.compute.manager [req-a1a86112-be95-4a7a-9c99-222d59ab6465 req-a5041c04-af4e-463d-8c93-e7ec217d3740 service nova] [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] Received event network-vif-deleted-33af3186-72fb-4c89-981b-e53007b95853 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 832.437957] env[62096]: DEBUG nova.network.neutron [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: 4ce3f48e-e45c-4628-8c35-8493c655a6f3] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.438477] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg e2a58d4a233547b8b14edaf2a12f6c13 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 832.446583] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e2a58d4a233547b8b14edaf2a12f6c13 [ 832.666187] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Lock "a5b5afc7-abc0-430a-b682-2c1946b4a6d8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 158.373s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 832.666940] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Expecting reply to msg 9219ebafc2a443d3a60584af67d94841 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 832.675865] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9219ebafc2a443d3a60584af67d94841 [ 832.685257] env[62096]: DEBUG nova.scheduler.client.report [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 832.687577] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg dfee994e5dbc4c03aabadddc9a251def in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 832.700176] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dfee994e5dbc4c03aabadddc9a251def [ 832.940686] env[62096]: INFO nova.compute.manager [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] [instance: 
4ce3f48e-e45c-4628-8c35-8493c655a6f3] Took 1.03 seconds to deallocate network for instance. [ 832.942418] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 70b4e4245cdc4fd18cb491e52a0731ff in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 832.990595] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 70b4e4245cdc4fd18cb491e52a0731ff [ 833.169148] env[62096]: DEBUG nova.compute.manager [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 833.170892] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Expecting reply to msg 86efba144a8e46118b2499ddf4bab2a6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 833.189315] env[62096]: DEBUG oslo_concurrency.lockutils [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.394s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 833.189810] env[62096]: DEBUG nova.compute.manager [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 833.191873] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg e0345b523fb14100bbf5536a90e8ad78 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 833.192813] env[62096]: DEBUG oslo_concurrency.lockutils [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.435s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 833.194412] env[62096]: INFO nova.compute.claims [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 833.195918] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg 3733d7e28f734c6aa9b84ab94926755f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 833.211785] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 86efba144a8e46118b2499ddf4bab2a6 [ 833.220601] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e0345b523fb14100bbf5536a90e8ad78 [ 833.231960] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3733d7e28f734c6aa9b84ab94926755f [ 833.447214] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg 4a74274e8a5744ec917511cdd657bb20 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 833.476064] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4a74274e8a5744ec917511cdd657bb20 [ 833.695883] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 833.698291] env[62096]: DEBUG nova.compute.utils [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 833.698863] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg 64a0bf94d9ed42a08fcee876591c0949 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 833.703618] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 
tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg 25cbea6cb45c4ae7b827fb77b1182606 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 833.703618] env[62096]: DEBUG nova.compute.manager [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 833.703618] env[62096]: DEBUG nova.network.neutron [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 833.717782] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 25cbea6cb45c4ae7b827fb77b1182606 [ 833.717782] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 64a0bf94d9ed42a08fcee876591c0949 [ 833.749171] env[62096]: DEBUG nova.policy [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '480141f764ae4387a7740719160c9ddd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b8386168ae3843e58ae487f59e81fc4f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 833.981392] env[62096]: INFO nova.scheduler.client.report [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Deleted allocations for instance 4ce3f48e-e45c-4628-8c35-8493c655a6f3 [ 833.992319] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Expecting reply to msg b994fa951ab44f5f90b10cffc9870d27 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 834.007872] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b994fa951ab44f5f90b10cffc9870d27 [ 834.090878] env[62096]: DEBUG nova.network.neutron [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] Successfully created port: 81da80c1-1e68-4313-8e6e-e7163fdac3d3 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 834.202937] env[62096]: DEBUG nova.compute.manager [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 834.204894] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg 318dddd48e974c40a5cea48114ea3091 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 834.240922] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 318dddd48e974c40a5cea48114ea3091 [ 834.494321] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e2fc6667-d1ca-469f-b3b0-e01d9f74d1ee tempest-MultipleCreateTestJSON-1401046856 tempest-MultipleCreateTestJSON-1401046856-project-member] Lock "4ce3f48e-e45c-4628-8c35-8493c655a6f3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 160.167s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.494889] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Expecting reply to msg 8a2908e92c8b446d8cab0e90fd0f30d4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 834.508431] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8a2908e92c8b446d8cab0e90fd0f30d4 [ 834.519151] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91a1f56d-0061-40a1-9363-75ecddc680ca {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.529407] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd543356-c198-4047-a0d1-8fc23c8bd9b2 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.566364] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ad0e05c-25fa-409c-8360-bfbcb79dd105 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.577761] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-587030a2-b322-4e64-ab61-8f73053a8295 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.597892] env[62096]: DEBUG nova.compute.provider_tree [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 834.598847] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg 76d585884d3e40b2a5309d1b94d50f11 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 834.605892] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 76d585884d3e40b2a5309d1b94d50f11 [ 834.712894] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 
tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg e24ed145f31a4177884e83e7ca0ee270 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 834.746216] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e24ed145f31a4177884e83e7ca0ee270 [ 834.863463] env[62096]: DEBUG nova.compute.manager [req-a2ce42fb-daad-4af9-9fe0-21aba99f6ee8 req-460d4057-bfc0-46d4-8af4-cccbee0d28df service nova] [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] Received event network-changed-81da80c1-1e68-4313-8e6e-e7163fdac3d3 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 834.863660] env[62096]: DEBUG nova.compute.manager [req-a2ce42fb-daad-4af9-9fe0-21aba99f6ee8 req-460d4057-bfc0-46d4-8af4-cccbee0d28df service nova] [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] Refreshing instance network info cache due to event network-changed-81da80c1-1e68-4313-8e6e-e7163fdac3d3. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 834.863875] env[62096]: DEBUG oslo_concurrency.lockutils [req-a2ce42fb-daad-4af9-9fe0-21aba99f6ee8 req-460d4057-bfc0-46d4-8af4-cccbee0d28df service nova] Acquiring lock "refresh_cache-d5a6c627-bbab-49d4-a3bd-cb5b15264b18" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 834.864061] env[62096]: DEBUG oslo_concurrency.lockutils [req-a2ce42fb-daad-4af9-9fe0-21aba99f6ee8 req-460d4057-bfc0-46d4-8af4-cccbee0d28df service nova] Acquired lock "refresh_cache-d5a6c627-bbab-49d4-a3bd-cb5b15264b18" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.864232] env[62096]: DEBUG nova.network.neutron [req-a2ce42fb-daad-4af9-9fe0-21aba99f6ee8 req-460d4057-bfc0-46d4-8af4-cccbee0d28df service nova] [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] Refreshing network info cache for port 81da80c1-1e68-4313-8e6e-e7163fdac3d3 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 834.864698] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-a2ce42fb-daad-4af9-9fe0-21aba99f6ee8 req-460d4057-bfc0-46d4-8af4-cccbee0d28df service nova] Expecting reply to msg 3a377b559a0845e1898943393d50a586 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 834.871020] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3a377b559a0845e1898943393d50a586 [ 834.996803] env[62096]: DEBUG nova.compute.manager [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 834.998480] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Expecting reply to msg b618e837b2924666bf264c9bae3e8f1c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 835.008343] env[62096]: ERROR nova.compute.manager [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 81da80c1-1e68-4313-8e6e-e7163fdac3d3, please check neutron logs for more information. 
[ 835.008343] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 835.008343] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 835.008343] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 835.008343] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 835.008343] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 835.008343] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 835.008343] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 835.008343] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 835.008343] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 835.008343] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 835.008343] env[62096]: ERROR nova.compute.manager raise self.value [ 835.008343] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 835.008343] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 835.008343] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 835.008343] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 835.008855] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 835.008855] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 835.008855] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 81da80c1-1e68-4313-8e6e-e7163fdac3d3, please check neutron logs for more information. 
[ 835.008855] env[62096]: ERROR nova.compute.manager [ 835.008855] env[62096]: Traceback (most recent call last): [ 835.008855] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 835.008855] env[62096]: listener.cb(fileno) [ 835.008855] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 835.008855] env[62096]: result = function(*args, **kwargs) [ 835.008855] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 835.008855] env[62096]: return func(*args, **kwargs) [ 835.008855] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 835.008855] env[62096]: raise e [ 835.008855] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 835.008855] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 835.008855] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 835.008855] env[62096]: created_port_ids = self._update_ports_for_instance( [ 835.008855] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 835.008855] env[62096]: with excutils.save_and_reraise_exception(): [ 835.008855] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 835.008855] env[62096]: self.force_reraise() [ 835.008855] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 835.008855] env[62096]: raise self.value [ 835.008855] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 835.008855] env[62096]: updated_port = self._update_port( [ 835.008855] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 835.008855] env[62096]: _ensure_no_port_binding_failure(port) [ 835.008855] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 835.008855] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 835.009697] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 81da80c1-1e68-4313-8e6e-e7163fdac3d3, please check neutron logs for more information. 
[ 835.009697] env[62096]: Removing descriptor: 14 [ 835.055454] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b618e837b2924666bf264c9bae3e8f1c [ 835.101403] env[62096]: DEBUG nova.scheduler.client.report [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 835.103846] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg 005f080c3ded45688750820cfba853b2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 835.116671] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 005f080c3ded45688750820cfba853b2 [ 835.228925] env[62096]: DEBUG nova.compute.manager [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] Start spawning the instance on the hypervisor. {{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 835.253428] env[62096]: DEBUG nova.virt.hardware [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 835.253688] env[62096]: DEBUG nova.virt.hardware [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 835.253843] env[62096]: DEBUG nova.virt.hardware [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 835.254091] env[62096]: DEBUG nova.virt.hardware [None 
req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 835.254303] env[62096]: DEBUG nova.virt.hardware [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 835.254481] env[62096]: DEBUG nova.virt.hardware [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 835.254689] env[62096]: DEBUG nova.virt.hardware [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 835.254847] env[62096]: DEBUG nova.virt.hardware [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 835.255019] env[62096]: DEBUG nova.virt.hardware [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 835.255207] env[62096]: DEBUG nova.virt.hardware [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 835.255385] env[62096]: DEBUG nova.virt.hardware [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 835.256264] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ab64deb-8081-4272-a2cb-33805334ee95 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.264965] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7ccb0fd-8b0f-4b3e-a232-4901501a2ed8 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.278157] env[62096]: ERROR nova.compute.manager [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] Instance failed to spawn: 
nova.exception.PortBindingFailed: Binding failed for port 81da80c1-1e68-4313-8e6e-e7163fdac3d3, please check neutron logs for more information. [ 835.278157] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] Traceback (most recent call last): [ 835.278157] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 835.278157] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] yield resources [ 835.278157] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 835.278157] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] self.driver.spawn(context, instance, image_meta, [ 835.278157] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 835.278157] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] self._vmops.spawn(context, instance, image_meta, injected_files, [ 835.278157] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 835.278157] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] vm_ref = self.build_virtual_machine(instance, [ 835.278157] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 835.278547] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] vif_infos = vmwarevif.get_vif_info(self._session, [ 835.278547] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 835.278547] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] for vif in network_info: [ 835.278547] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 835.278547] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] return self._sync_wrapper(fn, *args, **kwargs) [ 835.278547] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 835.278547] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] self.wait() [ 835.278547] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 835.278547] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] self[:] = self._gt.wait() [ 835.278547] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 835.278547] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] return self._exit_event.wait() [ 835.278547] env[62096]: ERROR nova.compute.manager [instance: 
d5a6c627-bbab-49d4-a3bd-cb5b15264b18] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 835.278547] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] current.throw(*self._exc) [ 835.278853] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 835.278853] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] result = function(*args, **kwargs) [ 835.278853] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 835.278853] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] return func(*args, **kwargs) [ 835.278853] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 835.278853] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] raise e [ 835.278853] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 835.278853] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] nwinfo = self.network_api.allocate_for_instance( [ 835.278853] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 835.278853] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] created_port_ids = self._update_ports_for_instance( [ 835.278853] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 835.278853] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] with excutils.save_and_reraise_exception(): [ 835.278853] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 835.279150] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] self.force_reraise() [ 835.279150] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 835.279150] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] raise self.value [ 835.279150] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 835.279150] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] updated_port = self._update_port( [ 835.279150] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 835.279150] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] _ensure_no_port_binding_failure(port) [ 835.279150] env[62096]: ERROR 
nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 835.279150] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] raise exception.PortBindingFailed(port_id=port['id']) [ 835.279150] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] nova.exception.PortBindingFailed: Binding failed for port 81da80c1-1e68-4313-8e6e-e7163fdac3d3, please check neutron logs for more information. [ 835.279150] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] [ 835.279150] env[62096]: INFO nova.compute.manager [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] Terminating instance [ 835.280447] env[62096]: DEBUG oslo_concurrency.lockutils [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Acquiring lock "refresh_cache-d5a6c627-bbab-49d4-a3bd-cb5b15264b18" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 835.384070] env[62096]: DEBUG nova.network.neutron [req-a2ce42fb-daad-4af9-9fe0-21aba99f6ee8 req-460d4057-bfc0-46d4-8af4-cccbee0d28df service nova] [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 835.463912] env[62096]: DEBUG nova.network.neutron [req-a2ce42fb-daad-4af9-9fe0-21aba99f6ee8 req-460d4057-bfc0-46d4-8af4-cccbee0d28df service nova] [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.464538] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-a2ce42fb-daad-4af9-9fe0-21aba99f6ee8 req-460d4057-bfc0-46d4-8af4-cccbee0d28df service nova] Expecting reply to msg f21e720b65ee45de87da00d65a831c59 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 835.472618] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f21e720b65ee45de87da00d65a831c59 [ 835.522247] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 835.609629] env[62096]: DEBUG oslo_concurrency.lockutils [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.416s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 835.609629] env[62096]: DEBUG nova.compute.manager [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 835.611512] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg 4cddce2f1553469b94dc6d99ee47627a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 835.612590] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.359s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 835.614270] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Expecting reply to msg 3769c3418c034d69a6b94d9b521e6878 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 835.660066] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3769c3418c034d69a6b94d9b521e6878 [ 835.671738] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4cddce2f1553469b94dc6d99ee47627a [ 835.966918] env[62096]: DEBUG oslo_concurrency.lockutils [req-a2ce42fb-daad-4af9-9fe0-21aba99f6ee8 req-460d4057-bfc0-46d4-8af4-cccbee0d28df service nova] Releasing lock "refresh_cache-d5a6c627-bbab-49d4-a3bd-cb5b15264b18" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 835.967423] env[62096]: DEBUG oslo_concurrency.lockutils [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Acquired lock "refresh_cache-d5a6c627-bbab-49d4-a3bd-cb5b15264b18" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.967655] env[62096]: DEBUG nova.network.neutron [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 835.968180] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg c10da32628c54dfbac0131299d264dd9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 835.976046] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c10da32628c54dfbac0131299d264dd9 [ 836.117829] env[62096]: DEBUG nova.compute.utils [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 836.118013] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg 
9ac4114df6814383b527987870f80bfb in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 836.122976] env[62096]: DEBUG nova.compute.manager [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 836.122976] env[62096]: DEBUG nova.network.neutron [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 836.128369] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9ac4114df6814383b527987870f80bfb [ 836.176385] env[62096]: DEBUG nova.policy [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6f956b78c87f49c29ac1d804316f1896', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '205e7b01639c499baadc35fb26fba6ff', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 836.375009] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9c203b7-3f3c-440e-beca-c8db295fb591 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.384401] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4e43270-2cea-448b-9929-74ac2932c00b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.433772] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78068790-cc76-4e53-b97c-40f5ddff47d3 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.441933] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74fd2fca-3f90-4627-9999-937d8f146e14 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.457433] env[62096]: DEBUG nova.compute.provider_tree [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 836.457964] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Expecting reply to msg 073190c686994964bd7f4e4353379abf in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 836.466701] env[62096]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 073190c686994964bd7f4e4353379abf [ 836.476168] env[62096]: DEBUG nova.network.neutron [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] Successfully created port: 24bb1713-462a-4988-824c-abb2ceb0205b {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 836.522613] env[62096]: DEBUG nova.network.neutron [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 836.616143] env[62096]: DEBUG nova.network.neutron [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 836.616579] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg 6a060530d1494006be4da1073c278292 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 836.622297] env[62096]: DEBUG nova.compute.manager [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 836.624603] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg 8a14b09e82674f9c82b693a501aba874 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 836.628492] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6a060530d1494006be4da1073c278292 [ 836.659832] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8a14b09e82674f9c82b693a501aba874 [ 836.935635] env[62096]: DEBUG nova.compute.manager [req-0e602b42-c3c9-47d2-8844-55f373bfceeb req-07217366-9467-4aa3-98f2-fd7d4d338655 service nova] [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] Received event network-vif-deleted-81da80c1-1e68-4313-8e6e-e7163fdac3d3 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 836.960136] env[62096]: DEBUG nova.scheduler.client.report [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 836.962539] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Expecting reply to msg f30ebc1e548f4c97bce8ff1150f5b730 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 836.977588] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f30ebc1e548f4c97bce8ff1150f5b730 [ 837.120039] env[62096]: DEBUG oslo_concurrency.lockutils [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Releasing lock "refresh_cache-d5a6c627-bbab-49d4-a3bd-cb5b15264b18" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 837.120039] env[62096]: DEBUG nova.compute.manager [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 837.120039] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 837.120039] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-425fd151-25e7-42f4-9c66-8888e5fb3d43 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.129406] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22be5221-69b3-4d37-a046-1d1459080664 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.146064] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg 547cd78c1cfe4883a5e7c7d869ba50f0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 837.158424] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d5a6c627-bbab-49d4-a3bd-cb5b15264b18 could not be found. [ 837.158656] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 837.158833] env[62096]: INFO nova.compute.manager [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] Took 0.04 seconds to destroy the instance on the hypervisor. [ 837.159086] env[62096]: DEBUG oslo.service.loopingcall [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 837.159291] env[62096]: DEBUG nova.compute.manager [-] [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 837.159381] env[62096]: DEBUG nova.network.neutron [-] [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 837.173492] env[62096]: DEBUG nova.network.neutron [-] [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 837.174084] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 2498f8bb456a4be4b256a6f900bac301 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 837.180852] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2498f8bb456a4be4b256a6f900bac301 [ 837.186393] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 547cd78c1cfe4883a5e7c7d869ba50f0 [ 837.333534] env[62096]: ERROR nova.compute.manager [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 24bb1713-462a-4988-824c-abb2ceb0205b, please check neutron logs for more information. [ 837.333534] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 837.333534] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 837.333534] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 837.333534] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 837.333534] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 837.333534] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 837.333534] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 837.333534] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 837.333534] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 837.333534] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 837.333534] env[62096]: ERROR nova.compute.manager raise self.value [ 837.333534] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 837.333534] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 837.333534] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 837.333534] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 837.334040] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 837.334040] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 837.334040] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 24bb1713-462a-4988-824c-abb2ceb0205b, please check neutron logs for more information. 
[ 837.334040] env[62096]: ERROR nova.compute.manager [ 837.334040] env[62096]: Traceback (most recent call last): [ 837.334040] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 837.334040] env[62096]: listener.cb(fileno) [ 837.334040] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 837.334040] env[62096]: result = function(*args, **kwargs) [ 837.334040] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 837.334040] env[62096]: return func(*args, **kwargs) [ 837.334040] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 837.334040] env[62096]: raise e [ 837.334040] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 837.334040] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 837.334040] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 837.334040] env[62096]: created_port_ids = self._update_ports_for_instance( [ 837.334040] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 837.334040] env[62096]: with excutils.save_and_reraise_exception(): [ 837.334040] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 837.334040] env[62096]: self.force_reraise() [ 837.334040] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 837.334040] env[62096]: raise self.value [ 837.334040] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 837.334040] env[62096]: updated_port = self._update_port( [ 837.334040] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 837.334040] env[62096]: _ensure_no_port_binding_failure(port) [ 837.334040] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 837.334040] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 837.334763] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 24bb1713-462a-4988-824c-abb2ceb0205b, please check neutron logs for more information. [ 837.334763] env[62096]: Removing descriptor: 14 [ 837.465020] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.852s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.465700] env[62096]: ERROR nova.compute.manager [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 3e510cfe-9ee5-4953-a188-b59cb01a93c4, please check neutron logs for more information. 
[ 837.465700] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] Traceback (most recent call last): [ 837.465700] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 837.465700] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] self.driver.spawn(context, instance, image_meta, [ 837.465700] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 837.465700] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] self._vmops.spawn(context, instance, image_meta, injected_files, [ 837.465700] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 837.465700] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] vm_ref = self.build_virtual_machine(instance, [ 837.465700] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 837.465700] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] vif_infos = vmwarevif.get_vif_info(self._session, [ 837.465700] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 837.465976] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] for vif in network_info: [ 837.465976] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 837.465976] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] return self._sync_wrapper(fn, *args, **kwargs) [ 837.465976] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 837.465976] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] self.wait() [ 837.465976] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 837.465976] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] self[:] = self._gt.wait() [ 837.465976] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 837.465976] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] return self._exit_event.wait() [ 837.465976] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 837.465976] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] result = hub.switch() [ 837.465976] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
837.465976] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] return self.greenlet.switch() [ 837.466304] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 837.466304] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] result = function(*args, **kwargs) [ 837.466304] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 837.466304] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] return func(*args, **kwargs) [ 837.466304] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 837.466304] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] raise e [ 837.466304] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 837.466304] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] nwinfo = self.network_api.allocate_for_instance( [ 837.466304] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 837.466304] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] created_port_ids = self._update_ports_for_instance( [ 837.466304] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 837.466304] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] with excutils.save_and_reraise_exception(): [ 837.466304] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 837.466629] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] self.force_reraise() [ 837.466629] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 837.466629] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] raise self.value [ 837.466629] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 837.466629] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] updated_port = self._update_port( [ 837.466629] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 837.466629] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] _ensure_no_port_binding_failure(port) [ 837.466629] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 837.466629] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] raise exception.PortBindingFailed(port_id=port['id']) [ 837.466629] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] nova.exception.PortBindingFailed: Binding failed for port 3e510cfe-9ee5-4953-a188-b59cb01a93c4, please check neutron logs for more information. [ 837.466629] env[62096]: ERROR nova.compute.manager [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] [ 837.466922] env[62096]: DEBUG nova.compute.utils [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] Binding failed for port 3e510cfe-9ee5-4953-a188-b59cb01a93c4, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 837.467632] env[62096]: DEBUG oslo_concurrency.lockutils [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.914s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 837.469212] env[62096]: INFO nova.compute.claims [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 837.470879] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg a9e4607d66fc4bda8a5bbfa74e6c890f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 837.472253] env[62096]: DEBUG nova.compute.manager [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] Build of instance 7a85b800-725c-4d91-90bd-2056eb2fb116 was re-scheduled: Binding failed for port 3e510cfe-9ee5-4953-a188-b59cb01a93c4, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 837.472736] env[62096]: DEBUG nova.compute.manager [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 837.472992] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Acquiring lock "refresh_cache-7a85b800-725c-4d91-90bd-2056eb2fb116" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 837.473177] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Acquired lock "refresh_cache-7a85b800-725c-4d91-90bd-2056eb2fb116" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.473369] env[62096]: DEBUG nova.network.neutron [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 837.473757] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Expecting reply to msg f37f3d231cad4b3180d4a06855af621e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 837.487556] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f37f3d231cad4b3180d4a06855af621e [ 837.505375] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a9e4607d66fc4bda8a5bbfa74e6c890f [ 837.649759] env[62096]: DEBUG nova.compute.manager [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 837.674437] env[62096]: DEBUG nova.virt.hardware [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 837.674697] env[62096]: DEBUG nova.virt.hardware [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 837.674927] env[62096]: DEBUG nova.virt.hardware [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 837.675135] env[62096]: DEBUG nova.virt.hardware [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 837.675278] env[62096]: DEBUG nova.virt.hardware [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 837.675418] env[62096]: DEBUG nova.virt.hardware [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 837.675616] env[62096]: DEBUG nova.virt.hardware [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 837.675848] env[62096]: DEBUG nova.virt.hardware [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 837.675938] 
env[62096]: DEBUG nova.virt.hardware [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 837.676091] env[62096]: DEBUG nova.virt.hardware [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 837.676269] env[62096]: DEBUG nova.virt.hardware [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 837.676608] env[62096]: DEBUG nova.network.neutron [-] [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.677179] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 5e15a2dbfddd4401a9df1a238647314a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 837.678290] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c831e6d-3981-425a-94c4-5e61818ba7b4 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.686300] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5e15a2dbfddd4401a9df1a238647314a [ 837.687526] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8bd476e-8dda-4814-be40-0ab57e100b2b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.701681] env[62096]: ERROR nova.compute.manager [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 24bb1713-462a-4988-824c-abb2ceb0205b, please check neutron logs for more information. 
[ 837.701681] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] Traceback (most recent call last): [ 837.701681] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 837.701681] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] yield resources [ 837.701681] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 837.701681] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] self.driver.spawn(context, instance, image_meta, [ 837.701681] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 837.701681] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 837.701681] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 837.701681] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] vm_ref = self.build_virtual_machine(instance, [ 837.701681] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 837.702099] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] vif_infos = vmwarevif.get_vif_info(self._session, [ 837.702099] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 837.702099] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] for vif in network_info: [ 837.702099] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 837.702099] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] return self._sync_wrapper(fn, *args, **kwargs) [ 837.702099] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 837.702099] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] self.wait() [ 837.702099] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 837.702099] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] self[:] = self._gt.wait() [ 837.702099] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 837.702099] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] return self._exit_event.wait() [ 837.702099] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 837.702099] env[62096]: ERROR 
nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] current.throw(*self._exc) [ 837.702431] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 837.702431] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] result = function(*args, **kwargs) [ 837.702431] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 837.702431] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] return func(*args, **kwargs) [ 837.702431] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 837.702431] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] raise e [ 837.702431] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 837.702431] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] nwinfo = self.network_api.allocate_for_instance( [ 837.702431] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 837.702431] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] created_port_ids = self._update_ports_for_instance( [ 837.702431] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 837.702431] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] with excutils.save_and_reraise_exception(): [ 837.702431] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 837.702761] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] self.force_reraise() [ 837.702761] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 837.702761] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] raise self.value [ 837.702761] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 837.702761] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] updated_port = self._update_port( [ 837.702761] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 837.702761] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] _ensure_no_port_binding_failure(port) [ 837.702761] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
837.702761] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] raise exception.PortBindingFailed(port_id=port['id']) [ 837.702761] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] nova.exception.PortBindingFailed: Binding failed for port 24bb1713-462a-4988-824c-abb2ceb0205b, please check neutron logs for more information. [ 837.702761] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] [ 837.702761] env[62096]: INFO nova.compute.manager [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] Terminating instance [ 837.703942] env[62096]: DEBUG oslo_concurrency.lockutils [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Acquiring lock "refresh_cache-4057df30-37e2-415d-b6d2-e4211b95863d" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 837.704174] env[62096]: DEBUG oslo_concurrency.lockutils [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Acquired lock "refresh_cache-4057df30-37e2-415d-b6d2-e4211b95863d" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.704405] env[62096]: DEBUG nova.network.neutron [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 837.705279] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg 10c7cdf8eb3045d390a37f512c8e9ead in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 837.712316] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 10c7cdf8eb3045d390a37f512c8e9ead [ 837.977297] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 2f62d78e39f44121a029e94343f1d359 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 837.987721] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2f62d78e39f44121a029e94343f1d359 [ 838.011604] env[62096]: DEBUG nova.network.neutron [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 838.139726] env[62096]: DEBUG nova.network.neutron [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.140365] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Expecting reply to msg 230c5dcc544343cc85375c0d22de8aca in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 838.149822] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 230c5dcc544343cc85375c0d22de8aca [ 838.181936] env[62096]: INFO nova.compute.manager [-] [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] Took 1.02 seconds to deallocate network for instance. [ 838.184445] env[62096]: DEBUG nova.compute.claims [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 838.184617] env[62096]: DEBUG oslo_concurrency.lockutils [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.232881] env[62096]: DEBUG nova.network.neutron [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 838.330627] env[62096]: DEBUG nova.network.neutron [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.331179] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg 12fa40c2634647cb949131b6fc9d881a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 838.338890] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 12fa40c2634647cb949131b6fc9d881a [ 838.642609] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Releasing lock "refresh_cache-7a85b800-725c-4d91-90bd-2056eb2fb116" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 838.643439] env[62096]: DEBUG nova.compute.manager [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 838.643659] env[62096]: DEBUG nova.compute.manager [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 838.643980] env[62096]: DEBUG nova.network.neutron [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 838.671355] env[62096]: DEBUG nova.network.neutron [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 838.672108] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Expecting reply to msg 4479be386e204afbb61f79623825735c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 838.679334] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4479be386e204afbb61f79623825735c [ 838.790272] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be5ec9b8-28c7-4a95-9de9-9d3bb75c688f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.801825] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36b953a8-ed6b-4090-8ccf-16dbf5737099 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.839487] env[62096]: DEBUG oslo_concurrency.lockutils [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Releasing lock "refresh_cache-4057df30-37e2-415d-b6d2-e4211b95863d" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 838.840143] env[62096]: DEBUG nova.compute.manager [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 838.840522] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 838.841116] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-68507d43-8d9f-4e6e-9f68-3fa459b2521e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.843899] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bddb7359-6f88-44da-8caa-eec3e9cae3ab {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.853409] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e80ba3e-1de0-408f-a57d-a29ed0e22e18 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.861879] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-899d9dea-aee9-4781-9617-1b254af40451 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.889667] env[62096]: DEBUG nova.compute.provider_tree [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 838.890398] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg bbc0f4730c304a71b14c9390f3ba414d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 838.895721] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4057df30-37e2-415d-b6d2-e4211b95863d could not be found. [ 838.896097] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 838.896410] env[62096]: INFO nova.compute.manager [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] Took 0.06 seconds to destroy the instance on the hypervisor. 
[ 838.896768] env[62096]: DEBUG oslo.service.loopingcall [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 838.897434] env[62096]: DEBUG nova.compute.manager [-] [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 838.897647] env[62096]: DEBUG nova.network.neutron [-] [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 838.899675] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bbc0f4730c304a71b14c9390f3ba414d [ 838.916238] env[62096]: DEBUG nova.network.neutron [-] [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 838.916915] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 7dba1c99bfb044eeaf4813cddf647d92 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 838.932505] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7dba1c99bfb044eeaf4813cddf647d92 [ 839.014259] env[62096]: DEBUG nova.compute.manager [req-55fdd315-48a8-4df3-9a06-d5a06c9a5ec1 req-12e0e865-dda7-42e8-ad39-6df0c7270529 service nova] [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] Received event network-changed-24bb1713-462a-4988-824c-abb2ceb0205b {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 839.014591] env[62096]: DEBUG nova.compute.manager [req-55fdd315-48a8-4df3-9a06-d5a06c9a5ec1 req-12e0e865-dda7-42e8-ad39-6df0c7270529 service nova] [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] Refreshing instance network info cache due to event network-changed-24bb1713-462a-4988-824c-abb2ceb0205b. 
{{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 839.014902] env[62096]: DEBUG oslo_concurrency.lockutils [req-55fdd315-48a8-4df3-9a06-d5a06c9a5ec1 req-12e0e865-dda7-42e8-ad39-6df0c7270529 service nova] Acquiring lock "refresh_cache-4057df30-37e2-415d-b6d2-e4211b95863d" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.015310] env[62096]: DEBUG oslo_concurrency.lockutils [req-55fdd315-48a8-4df3-9a06-d5a06c9a5ec1 req-12e0e865-dda7-42e8-ad39-6df0c7270529 service nova] Acquired lock "refresh_cache-4057df30-37e2-415d-b6d2-e4211b95863d" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.015848] env[62096]: DEBUG nova.network.neutron [req-55fdd315-48a8-4df3-9a06-d5a06c9a5ec1 req-12e0e865-dda7-42e8-ad39-6df0c7270529 service nova] [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] Refreshing network info cache for port 24bb1713-462a-4988-824c-abb2ceb0205b {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 839.017061] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-55fdd315-48a8-4df3-9a06-d5a06c9a5ec1 req-12e0e865-dda7-42e8-ad39-6df0c7270529 service nova] Expecting reply to msg c05632c0c1324bfea8fa792c17837153 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 839.024728] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c05632c0c1324bfea8fa792c17837153 [ 839.174707] env[62096]: DEBUG nova.network.neutron [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.175588] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Expecting reply to msg f868b71689554f35bcfd1cdcbc22f1ab in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 839.183884] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f868b71689554f35bcfd1cdcbc22f1ab [ 839.399267] env[62096]: DEBUG nova.scheduler.client.report [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 839.402980] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 6216450a86bf403b81055cd9b1ac04c4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 839.416604] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
6216450a86bf403b81055cd9b1ac04c4 [ 839.419554] env[62096]: DEBUG nova.network.neutron [-] [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.420862] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 7f41baad3d8847a380b5844eb33f51cb in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 839.429717] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7f41baad3d8847a380b5844eb33f51cb [ 839.533979] env[62096]: DEBUG nova.network.neutron [req-55fdd315-48a8-4df3-9a06-d5a06c9a5ec1 req-12e0e865-dda7-42e8-ad39-6df0c7270529 service nova] [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 839.622347] env[62096]: DEBUG nova.network.neutron [req-55fdd315-48a8-4df3-9a06-d5a06c9a5ec1 req-12e0e865-dda7-42e8-ad39-6df0c7270529 service nova] [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.622860] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-55fdd315-48a8-4df3-9a06-d5a06c9a5ec1 req-12e0e865-dda7-42e8-ad39-6df0c7270529 service nova] Expecting reply to msg 05f7b26d4c924df8b313b5e2fb1ba203 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 839.634485] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 05f7b26d4c924df8b313b5e2fb1ba203 [ 839.678229] env[62096]: INFO nova.compute.manager [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] [instance: 7a85b800-725c-4d91-90bd-2056eb2fb116] Took 1.03 seconds to deallocate network for instance. [ 839.679956] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Expecting reply to msg 28565a8f8601427d87c54e560c87158c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 839.736540] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 28565a8f8601427d87c54e560c87158c [ 839.907230] env[62096]: DEBUG oslo_concurrency.lockutils [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.439s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 839.907737] env[62096]: DEBUG nova.compute.manager [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 839.909509] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg b88a1dd832434b3a9c3d813bb868d047 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 839.910569] env[62096]: DEBUG oslo_concurrency.lockutils [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.594s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 839.911930] env[62096]: INFO nova.compute.claims [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 839.921104] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Expecting reply to msg ecdcc513e7d24763887ef907f256d16c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 839.923721] env[62096]: INFO nova.compute.manager [-] [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] Took 1.03 seconds to deallocate network for instance. [ 839.929583] env[62096]: DEBUG nova.compute.claims [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 839.929739] env[62096]: DEBUG oslo_concurrency.lockutils [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 839.942114] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b88a1dd832434b3a9c3d813bb868d047 [ 839.957829] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ecdcc513e7d24763887ef907f256d16c [ 840.125057] env[62096]: DEBUG oslo_concurrency.lockutils [req-55fdd315-48a8-4df3-9a06-d5a06c9a5ec1 req-12e0e865-dda7-42e8-ad39-6df0c7270529 service nova] Releasing lock "refresh_cache-4057df30-37e2-415d-b6d2-e4211b95863d" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 840.125335] env[62096]: DEBUG nova.compute.manager [req-55fdd315-48a8-4df3-9a06-d5a06c9a5ec1 req-12e0e865-dda7-42e8-ad39-6df0c7270529 service nova] [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] Received event network-vif-deleted-24bb1713-462a-4988-824c-abb2ceb0205b {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 840.184315] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Expecting reply to msg 
eb77d71dda084081a19314cc74b2ddd4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 840.228611] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eb77d71dda084081a19314cc74b2ddd4 [ 840.331847] env[62096]: DEBUG oslo_concurrency.lockutils [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Acquiring lock "e480be29-d25d-4ff2-8de8-26d6c4078ca9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 840.332118] env[62096]: DEBUG oslo_concurrency.lockutils [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Lock "e480be29-d25d-4ff2-8de8-26d6c4078ca9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 840.424022] env[62096]: DEBUG nova.compute.utils [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 840.424692] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg c9e6daa633604ddfb8a74767cbbe8799 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 840.427075] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Expecting reply to msg 73f56e7730cc4d689f10d4544e7496f5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 840.435235] env[62096]: DEBUG nova.compute.manager [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 840.435235] env[62096]: DEBUG nova.network.neutron [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 840.435235] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c9e6daa633604ddfb8a74767cbbe8799 [ 840.435468] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 73f56e7730cc4d689f10d4544e7496f5 [ 840.487412] env[62096]: DEBUG nova.policy [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '491a2069427f43e79347a70e475e4dd2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e45573130e8e4ce4979b37e1b4c5af9f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 840.707866] env[62096]: INFO nova.scheduler.client.report [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Deleted allocations for instance 7a85b800-725c-4d91-90bd-2056eb2fb116 [ 840.714076] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Expecting reply to msg c25cc49e2136418483ff5d380678df86 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 840.727984] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c25cc49e2136418483ff5d380678df86 [ 840.765192] env[62096]: DEBUG nova.network.neutron [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] Successfully created port: 38272d5d-1fd8-4295-91b2-3fc9e342614d {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 840.930733] env[62096]: DEBUG nova.compute.manager [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 840.932551] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 8332dde05d64432ca9052bea76bd4b93 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 840.973579] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8332dde05d64432ca9052bea76bd4b93 [ 841.216741] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cd2ebb42-6f90-4faf-9937-567b5002bce1 tempest-ServerMetadataNegativeTestJSON-946662778 tempest-ServerMetadataNegativeTestJSON-946662778-project-member] Lock "7a85b800-725c-4d91-90bd-2056eb2fb116" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 139.345s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.217544] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Expecting reply to msg 9c7ce028c75b451898a5799c3e03db00 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 841.227746] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c957a45-f491-4bfe-89c2-621fbd457ff5 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.232653] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9c7ce028c75b451898a5799c3e03db00 [ 841.236134] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec76f9cb-96ed-4212-97b4-5d5e2556f4d8 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.268973] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9e9e29a-cf7a-4354-955e-705ecb73991d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.279373] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6efe7224-cf68-42bb-9a53-c57de58e9440 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.300249] env[62096]: DEBUG nova.compute.provider_tree [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 841.300464] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Expecting reply to msg 9b30f7b10ac84404be35ddadf9851abf in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 841.309643] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9b30f7b10ac84404be35ddadf9851abf [ 841.441883] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 
tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg b2724eb309c747c296972dba47d0f99c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 841.475274] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b2724eb309c747c296972dba47d0f99c [ 841.552707] env[62096]: DEBUG nova.compute.manager [req-9d4a2ecc-46cb-43c6-86b7-ae844069eb58 req-f7242aa8-ea0c-4760-a3be-eaaf6d5425bc service nova] [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] Received event network-changed-38272d5d-1fd8-4295-91b2-3fc9e342614d {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 841.552892] env[62096]: DEBUG nova.compute.manager [req-9d4a2ecc-46cb-43c6-86b7-ae844069eb58 req-f7242aa8-ea0c-4760-a3be-eaaf6d5425bc service nova] [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] Refreshing instance network info cache due to event network-changed-38272d5d-1fd8-4295-91b2-3fc9e342614d. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 841.553102] env[62096]: DEBUG oslo_concurrency.lockutils [req-9d4a2ecc-46cb-43c6-86b7-ae844069eb58 req-f7242aa8-ea0c-4760-a3be-eaaf6d5425bc service nova] Acquiring lock "refresh_cache-012afef5-91e9-4fc1-af98-c17a3188ad45" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 841.553240] env[62096]: DEBUG oslo_concurrency.lockutils [req-9d4a2ecc-46cb-43c6-86b7-ae844069eb58 req-f7242aa8-ea0c-4760-a3be-eaaf6d5425bc service nova] Acquired lock "refresh_cache-012afef5-91e9-4fc1-af98-c17a3188ad45" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.553396] env[62096]: DEBUG nova.network.neutron [req-9d4a2ecc-46cb-43c6-86b7-ae844069eb58 req-f7242aa8-ea0c-4760-a3be-eaaf6d5425bc service nova] [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] Refreshing network info cache for port 38272d5d-1fd8-4295-91b2-3fc9e342614d {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 841.553805] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-9d4a2ecc-46cb-43c6-86b7-ae844069eb58 req-f7242aa8-ea0c-4760-a3be-eaaf6d5425bc service nova] Expecting reply to msg d61f520c81bd40248c7ac1e1284a0b07 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 841.561705] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d61f520c81bd40248c7ac1e1284a0b07 [ 841.659687] env[62096]: ERROR nova.compute.manager [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 38272d5d-1fd8-4295-91b2-3fc9e342614d, please check neutron logs for more information. 
[ 841.659687] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 841.659687] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 841.659687] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 841.659687] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 841.659687] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 841.659687] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 841.659687] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 841.659687] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 841.659687] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 841.659687] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 841.659687] env[62096]: ERROR nova.compute.manager raise self.value [ 841.659687] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 841.659687] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 841.659687] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 841.659687] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 841.660184] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 841.660184] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 841.660184] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 38272d5d-1fd8-4295-91b2-3fc9e342614d, please check neutron logs for more information. 
[ 841.660184] env[62096]: ERROR nova.compute.manager [ 841.660184] env[62096]: Traceback (most recent call last): [ 841.660184] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 841.660184] env[62096]: listener.cb(fileno) [ 841.660184] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 841.660184] env[62096]: result = function(*args, **kwargs) [ 841.660184] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 841.660184] env[62096]: return func(*args, **kwargs) [ 841.660184] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 841.660184] env[62096]: raise e [ 841.660184] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 841.660184] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 841.660184] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 841.660184] env[62096]: created_port_ids = self._update_ports_for_instance( [ 841.660184] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 841.660184] env[62096]: with excutils.save_and_reraise_exception(): [ 841.660184] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 841.660184] env[62096]: self.force_reraise() [ 841.660184] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 841.660184] env[62096]: raise self.value [ 841.660184] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 841.660184] env[62096]: updated_port = self._update_port( [ 841.660184] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 841.660184] env[62096]: _ensure_no_port_binding_failure(port) [ 841.660184] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 841.660184] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 841.661019] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 38272d5d-1fd8-4295-91b2-3fc9e342614d, please check neutron logs for more information. [ 841.661019] env[62096]: Removing descriptor: 14 [ 841.719500] env[62096]: DEBUG nova.compute.manager [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] Starting instance... 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 841.721437] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Expecting reply to msg 70156e6cafef4eeba0e2443c51999764 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 841.754612] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 70156e6cafef4eeba0e2443c51999764 [ 841.803366] env[62096]: DEBUG nova.scheduler.client.report [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 841.805873] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Expecting reply to msg f646054a5ae340199a6bbf79f80b2e89 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 841.817967] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f646054a5ae340199a6bbf79f80b2e89 [ 841.945064] env[62096]: DEBUG nova.compute.manager [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 841.971187] env[62096]: DEBUG nova.virt.hardware [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 841.971451] env[62096]: DEBUG nova.virt.hardware [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 841.971604] env[62096]: DEBUG nova.virt.hardware [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 841.971781] env[62096]: DEBUG nova.virt.hardware [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 841.971922] env[62096]: DEBUG nova.virt.hardware [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 841.972080] env[62096]: DEBUG nova.virt.hardware [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 841.972288] env[62096]: DEBUG nova.virt.hardware [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 841.972446] env[62096]: DEBUG nova.virt.hardware [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 841.972606] env[62096]: DEBUG nova.virt.hardware [None 
req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 841.972764] env[62096]: DEBUG nova.virt.hardware [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 841.972930] env[62096]: DEBUG nova.virt.hardware [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 841.974063] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c323a171-8ed8-4488-b269-03ed7a603417 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.983401] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-025ca8bc-2a32-413f-b92a-916b76a34112 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.999136] env[62096]: ERROR nova.compute.manager [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 38272d5d-1fd8-4295-91b2-3fc9e342614d, please check neutron logs for more information. 
[ 841.999136] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] Traceback (most recent call last): [ 841.999136] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 841.999136] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] yield resources [ 841.999136] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 841.999136] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] self.driver.spawn(context, instance, image_meta, [ 841.999136] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 841.999136] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] self._vmops.spawn(context, instance, image_meta, injected_files, [ 841.999136] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 841.999136] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] vm_ref = self.build_virtual_machine(instance, [ 841.999136] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 841.999528] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] vif_infos = vmwarevif.get_vif_info(self._session, [ 841.999528] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 841.999528] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] for vif in network_info: [ 841.999528] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 841.999528] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] return self._sync_wrapper(fn, *args, **kwargs) [ 841.999528] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 841.999528] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] self.wait() [ 841.999528] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 841.999528] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] self[:] = self._gt.wait() [ 841.999528] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 841.999528] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] return self._exit_event.wait() [ 841.999528] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 841.999528] env[62096]: ERROR 
nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] current.throw(*self._exc) [ 841.999910] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 841.999910] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] result = function(*args, **kwargs) [ 841.999910] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 841.999910] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] return func(*args, **kwargs) [ 841.999910] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 841.999910] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] raise e [ 841.999910] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 841.999910] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] nwinfo = self.network_api.allocate_for_instance( [ 841.999910] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 841.999910] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] created_port_ids = self._update_ports_for_instance( [ 841.999910] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 841.999910] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] with excutils.save_and_reraise_exception(): [ 841.999910] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 842.000258] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] self.force_reraise() [ 842.000258] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 842.000258] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] raise self.value [ 842.000258] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 842.000258] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] updated_port = self._update_port( [ 842.000258] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 842.000258] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] _ensure_no_port_binding_failure(port) [ 842.000258] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
842.000258] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] raise exception.PortBindingFailed(port_id=port['id']) [ 842.000258] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] nova.exception.PortBindingFailed: Binding failed for port 38272d5d-1fd8-4295-91b2-3fc9e342614d, please check neutron logs for more information. [ 842.000258] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] [ 842.000258] env[62096]: INFO nova.compute.manager [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] Terminating instance [ 842.005753] env[62096]: DEBUG oslo_concurrency.lockutils [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Acquiring lock "refresh_cache-012afef5-91e9-4fc1-af98-c17a3188ad45" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 842.072891] env[62096]: DEBUG nova.network.neutron [req-9d4a2ecc-46cb-43c6-86b7-ae844069eb58 req-f7242aa8-ea0c-4760-a3be-eaaf6d5425bc service nova] [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 842.148722] env[62096]: DEBUG nova.network.neutron [req-9d4a2ecc-46cb-43c6-86b7-ae844069eb58 req-f7242aa8-ea0c-4760-a3be-eaaf6d5425bc service nova] [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 842.149267] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-9d4a2ecc-46cb-43c6-86b7-ae844069eb58 req-f7242aa8-ea0c-4760-a3be-eaaf6d5425bc service nova] Expecting reply to msg 100e26cc8b60470a80bc79ba60021ae6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 842.157380] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 100e26cc8b60470a80bc79ba60021ae6 [ 842.251172] env[62096]: DEBUG oslo_concurrency.lockutils [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 842.308885] env[62096]: DEBUG oslo_concurrency.lockutils [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.398s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 842.309426] env[62096]: DEBUG nova.compute.manager [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 842.311066] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Expecting reply to msg 07ca45d8c1e442b482656cf0b936622f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 842.317478] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.266s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 842.317478] env[62096]: INFO nova.compute.claims [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 842.317478] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Expecting reply to msg 9525b9e8ce2a4f6f81c32e3b3feb9d61 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 842.350631] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9525b9e8ce2a4f6f81c32e3b3feb9d61 [ 842.351104] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 07ca45d8c1e442b482656cf0b936622f [ 842.653143] env[62096]: DEBUG oslo_concurrency.lockutils [req-9d4a2ecc-46cb-43c6-86b7-ae844069eb58 req-f7242aa8-ea0c-4760-a3be-eaaf6d5425bc service nova] Releasing lock "refresh_cache-012afef5-91e9-4fc1-af98-c17a3188ad45" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 842.653572] env[62096]: DEBUG oslo_concurrency.lockutils [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Acquired lock "refresh_cache-012afef5-91e9-4fc1-af98-c17a3188ad45" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.653753] env[62096]: DEBUG nova.network.neutron [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 842.654363] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg e54a731bfc3949dd985085f168437f39 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 842.661641] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e54a731bfc3949dd985085f168437f39 [ 842.818314] env[62096]: DEBUG nova.compute.utils [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name 
/opt/stack/nova/nova/compute/utils.py:238}} [ 842.818973] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Expecting reply to msg 2716d8d5a30b42228ef1323e6b2c63e7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 842.820995] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Expecting reply to msg 79a807b6dd7d4041b43f1c14bfea6792 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 842.821771] env[62096]: DEBUG nova.compute.manager [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Not allocating networking since 'none' was specified. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 842.828660] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2716d8d5a30b42228ef1323e6b2c63e7 [ 842.829727] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 79a807b6dd7d4041b43f1c14bfea6792 [ 843.198733] env[62096]: DEBUG nova.network.neutron [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 843.289418] env[62096]: DEBUG nova.network.neutron [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 843.290022] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 8a8fb86d2cef473a9760e4f327ad4794 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 843.298531] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8a8fb86d2cef473a9760e4f327ad4794 [ 843.323234] env[62096]: DEBUG nova.compute.manager [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 843.325001] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Expecting reply to msg 0d3bc27acb80440cabbff89360318e7e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 843.371381] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0d3bc27acb80440cabbff89360318e7e [ 843.590014] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2776c2e-c994-455f-9c04-306728d06274 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.597920] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf73ec29-901b-4eb0-8cb3-f568d86e1a89 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.603010] env[62096]: DEBUG nova.compute.manager [req-84d4d061-041b-4a7e-8d6e-604e8eb46ddb req-2bcf7635-40c3-41ac-ac46-d7281950dc34 service nova] [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] Received event network-vif-deleted-38272d5d-1fd8-4295-91b2-3fc9e342614d {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 843.630629] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51b56ab7-3d52-4506-b7e7-76ee401fc7bc {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.638115] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cd691ba-4272-41e7-9b8e-8e9b9c455e7c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.652489] env[62096]: DEBUG nova.compute.provider_tree [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 843.652999] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Expecting reply to msg 5a3532c1fac44fdca2b78419a1961d45 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 843.661536] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5a3532c1fac44fdca2b78419a1961d45 [ 843.793135] env[62096]: DEBUG oslo_concurrency.lockutils [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Releasing lock "refresh_cache-012afef5-91e9-4fc1-af98-c17a3188ad45" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 843.793553] env[62096]: DEBUG nova.compute.manager [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 843.793746] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 843.794121] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-89021b7d-1f6e-4135-a05a-e0e2cde7763e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.802068] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f3c5ad7-bf61-4b13-b95f-0740d11e3e2a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.822149] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 012afef5-91e9-4fc1-af98-c17a3188ad45 could not be found. [ 843.822418] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 843.822522] env[62096]: INFO nova.compute.manager [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] Took 0.03 seconds to destroy the instance on the hypervisor. [ 843.822750] env[62096]: DEBUG oslo.service.loopingcall [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 843.822964] env[62096]: DEBUG nova.compute.manager [-] [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 843.823059] env[62096]: DEBUG nova.network.neutron [-] [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 843.831719] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Expecting reply to msg b2ecfc385ee340baaf951262964d95d0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 843.840322] env[62096]: DEBUG nova.network.neutron [-] [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 843.840778] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 23df0dabc4114017be0a791c46c4c194 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 843.848804] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 23df0dabc4114017be0a791c46c4c194 [ 843.868846] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b2ecfc385ee340baaf951262964d95d0 [ 844.160026] env[62096]: DEBUG nova.scheduler.client.report [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 844.161770] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Expecting reply to msg 6c1ac053c0134c44b18e284688af1d3c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 844.172912] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6c1ac053c0134c44b18e284688af1d3c [ 844.335221] env[62096]: DEBUG nova.compute.manager [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 844.342560] env[62096]: DEBUG nova.network.neutron [-] [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.343145] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 77bf16b6960b47cc96cdcafc89ab068c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 844.351290] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 77bf16b6960b47cc96cdcafc89ab068c [ 844.359503] env[62096]: DEBUG nova.virt.hardware [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 844.359731] env[62096]: DEBUG nova.virt.hardware [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 844.359887] env[62096]: DEBUG nova.virt.hardware [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 844.360084] env[62096]: DEBUG nova.virt.hardware [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 844.360238] env[62096]: DEBUG nova.virt.hardware [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 844.360381] env[62096]: DEBUG nova.virt.hardware [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 844.360581] env[62096]: DEBUG nova.virt.hardware [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 844.360735] env[62096]: DEBUG nova.virt.hardware [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 844.360895] env[62096]: DEBUG nova.virt.hardware [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 844.361050] env[62096]: DEBUG nova.virt.hardware [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 844.361220] env[62096]: DEBUG nova.virt.hardware [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 844.362251] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f15c5d0-5e70-4b29-934a-310acf6122b7 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.370202] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96abcafb-be86-4ffa-a340-4725f640f610 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.382946] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Instance VIF info [] {{(pid=62096) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 844.388540] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Creating folder: Project (22b7524993dd43ff8d63757157d7cdb8). Parent ref: group-v107847. {{(pid=62096) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 844.388799] env[62096]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e933bed9-b823-43ac-8e7a-b7c31b08a89c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.402499] env[62096]: INFO nova.virt.vmwareapi.vm_util [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Created folder: Project (22b7524993dd43ff8d63757157d7cdb8) in parent group-v107847. [ 844.402668] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Creating folder: Instances. Parent ref: group-v107862. 
{{(pid=62096) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 844.402859] env[62096]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-56ae7ebd-26e2-488a-a566-85a2546d4315 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.410672] env[62096]: INFO nova.virt.vmwareapi.vm_util [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Created folder: Instances in parent group-v107862. [ 844.410885] env[62096]: DEBUG oslo.service.loopingcall [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 844.411053] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Creating VM on the ESX host {{(pid=62096) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 844.411227] env[62096]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1bb9ec60-e4fc-40a8-b4f4-5212efeaf5e2 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.426592] env[62096]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 844.426592] env[62096]: value = "task-397393" [ 844.426592] env[62096]: _type = "Task" [ 844.426592] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.433632] env[62096]: DEBUG oslo_vmware.api [-] Task: {'id': task-397393, 'name': CreateVM_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.664527] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.352s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 844.665161] env[62096]: DEBUG nova.compute.manager [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 844.666967] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Expecting reply to msg 8b197fa0958a4bba94488492f7bc9d33 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 844.674200] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.392s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 844.674200] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Expecting reply to msg bf1058e3454a4fb18662cb80489c7ec1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 844.703498] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8b197fa0958a4bba94488492f7bc9d33 [ 844.704159] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bf1058e3454a4fb18662cb80489c7ec1 [ 844.845670] env[62096]: INFO nova.compute.manager [-] [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] Took 1.02 seconds to deallocate network for instance. [ 844.849854] env[62096]: DEBUG nova.compute.claims [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 844.850026] env[62096]: DEBUG oslo_concurrency.lockutils [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 844.936896] env[62096]: DEBUG oslo_vmware.api [-] Task: {'id': task-397393, 'name': CreateVM_Task, 'duration_secs': 0.250425} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.937071] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Created VM on the ESX host {{(pid=62096) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 844.937492] env[62096]: DEBUG oslo_concurrency.lockutils [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 844.937647] env[62096]: DEBUG oslo_concurrency.lockutils [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Acquired lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.937990] env[62096]: DEBUG oslo_concurrency.lockutils [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 844.938557] env[62096]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e8a6648-51ce-429d-8a1b-8db4e995c716 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.942923] env[62096]: DEBUG oslo_vmware.api [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Waiting for the task: (returnval){ [ 844.942923] env[62096]: value = "session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]527682d1-621d-690a-f0ba-c7ec50477e26" [ 844.942923] env[62096]: _type = "Task" [ 844.942923] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.950113] env[62096]: DEBUG oslo_vmware.api [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]527682d1-621d-690a-f0ba-c7ec50477e26, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.177082] env[62096]: DEBUG nova.compute.utils [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 845.177703] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Expecting reply to msg 5ea64d988a174cbcaf76118f4d2ca096 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 845.178906] env[62096]: DEBUG nova.compute.manager [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 845.179078] env[62096]: DEBUG nova.network.neutron [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 845.188035] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ea64d988a174cbcaf76118f4d2ca096 [ 845.229378] env[62096]: DEBUG nova.policy [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4b07192e6cc24c9a87e2d4af8b0ba0f3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c426317846244c6e9708ab195f52d14b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 845.405848] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb6c7a53-7d52-4203-ab61-64df9f77c2c1 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.414412] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47c16048-b945-4bf9-9455-068514d80a38 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.458357] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b644450-11b6-4e49-9d88-5af86920b6c0 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.466432] env[62096]: DEBUG oslo_vmware.api [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]527682d1-621d-690a-f0ba-c7ec50477e26, 'name': SearchDatastore_Task, 'duration_secs': 0.010955} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.469438] env[62096]: DEBUG oslo_concurrency.lockutils [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Releasing lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 845.469935] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Processing image fb764baa-3805-45c4-a694-aa91b0932110 {{(pid=62096) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 845.470288] env[62096]: DEBUG oslo_concurrency.lockutils [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 845.470559] env[62096]: DEBUG oslo_concurrency.lockutils [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Acquired lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.470849] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62096) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 845.471238] env[62096]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c762c72b-577d-440a-9b5e-eebfd92a4847 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.474613] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9db958d6-0483-413b-8ab4-5ceec8442aec {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.490601] env[62096]: DEBUG nova.compute.provider_tree [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 845.491159] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Expecting reply to msg ea4b05e741814aca86526ff917cc3d7c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 845.493522] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Created directory with path [datastore2] 
devstack-image-cache_base {{(pid=62096) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 845.493522] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62096) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 845.494340] env[62096]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89e49583-9a60-40c7-902e-727867eb8c1f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.499386] env[62096]: DEBUG oslo_vmware.api [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Waiting for the task: (returnval){ [ 845.499386] env[62096]: value = "session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]52f2e650-ab47-4189-4ec1-4ff2605d4c63" [ 845.499386] env[62096]: _type = "Task" [ 845.499386] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.499964] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ea4b05e741814aca86526ff917cc3d7c [ 845.511728] env[62096]: DEBUG oslo_vmware.api [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]52f2e650-ab47-4189-4ec1-4ff2605d4c63, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.514136] env[62096]: DEBUG nova.network.neutron [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] Successfully created port: b6fb4595-6c02-4377-916c-a91aaa951bfb {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 845.682057] env[62096]: DEBUG nova.compute.manager [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 845.683917] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Expecting reply to msg 3b7dc11cb92145aa8c1da62a9c67f01e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 845.721248] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3b7dc11cb92145aa8c1da62a9c67f01e [ 845.995765] env[62096]: DEBUG nova.scheduler.client.report [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 845.998385] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Expecting reply to msg bd4d64309af7496ea7983e1b47ba0c5d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 846.010905] env[62096]: DEBUG oslo_vmware.api [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]52f2e650-ab47-4189-4ec1-4ff2605d4c63, 'name': SearchDatastore_Task, 'duration_secs': 0.008653} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.011717] env[62096]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88731052-506c-4f7c-a655-7d3b7165919f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.014130] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bd4d64309af7496ea7983e1b47ba0c5d [ 846.017339] env[62096]: DEBUG oslo_vmware.api [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Waiting for the task: (returnval){ [ 846.017339] env[62096]: value = "session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]527d87ff-53b5-02d1-e36e-c717511ba046" [ 846.017339] env[62096]: _type = "Task" [ 846.017339] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.025125] env[62096]: DEBUG oslo_vmware.api [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]527d87ff-53b5-02d1-e36e-c717511ba046, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.181406] env[62096]: DEBUG nova.compute.manager [req-a5067c6b-1d88-438a-9f1f-559e171698fd req-1d56cab2-4f33-455d-a2e3-2f7fc36735ff service nova] [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] Received event network-changed-b6fb4595-6c02-4377-916c-a91aaa951bfb {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 846.181594] env[62096]: DEBUG nova.compute.manager [req-a5067c6b-1d88-438a-9f1f-559e171698fd req-1d56cab2-4f33-455d-a2e3-2f7fc36735ff service nova] [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] Refreshing instance network info cache due to event network-changed-b6fb4595-6c02-4377-916c-a91aaa951bfb. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 846.181815] env[62096]: DEBUG oslo_concurrency.lockutils [req-a5067c6b-1d88-438a-9f1f-559e171698fd req-1d56cab2-4f33-455d-a2e3-2f7fc36735ff service nova] Acquiring lock "refresh_cache-fcf84c67-fdea-41d7-aed9-690a45c97eaa" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 846.182001] env[62096]: DEBUG oslo_concurrency.lockutils [req-a5067c6b-1d88-438a-9f1f-559e171698fd req-1d56cab2-4f33-455d-a2e3-2f7fc36735ff service nova] Acquired lock "refresh_cache-fcf84c67-fdea-41d7-aed9-690a45c97eaa" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.182171] env[62096]: DEBUG nova.network.neutron [req-a5067c6b-1d88-438a-9f1f-559e171698fd req-1d56cab2-4f33-455d-a2e3-2f7fc36735ff service nova] [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] Refreshing network info cache for port b6fb4595-6c02-4377-916c-a91aaa951bfb {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 846.183012] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-a5067c6b-1d88-438a-9f1f-559e171698fd req-1d56cab2-4f33-455d-a2e3-2f7fc36735ff service nova] Expecting reply to msg 90a6159b567b48e595a413fd86a545ad in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 846.188360] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Expecting reply to msg dc4ae2c910c74bf2bdcbb3c0b1ce39b2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 846.190455] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 90a6159b567b48e595a413fd86a545ad [ 846.223684] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dc4ae2c910c74bf2bdcbb3c0b1ce39b2 [ 846.338197] env[62096]: ERROR nova.compute.manager [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b6fb4595-6c02-4377-916c-a91aaa951bfb, please check neutron logs for more information. 
[ 846.338197] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 846.338197] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 846.338197] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 846.338197] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 846.338197] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 846.338197] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 846.338197] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 846.338197] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 846.338197] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 846.338197] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 846.338197] env[62096]: ERROR nova.compute.manager raise self.value [ 846.338197] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 846.338197] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 846.338197] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 846.338197] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 846.338628] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 846.338628] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 846.338628] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b6fb4595-6c02-4377-916c-a91aaa951bfb, please check neutron logs for more information. 
[ 846.338628] env[62096]: ERROR nova.compute.manager [ 846.338628] env[62096]: Traceback (most recent call last): [ 846.338628] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 846.338628] env[62096]: listener.cb(fileno) [ 846.338628] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 846.338628] env[62096]: result = function(*args, **kwargs) [ 846.338628] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 846.338628] env[62096]: return func(*args, **kwargs) [ 846.338628] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 846.338628] env[62096]: raise e [ 846.338628] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 846.338628] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 846.338628] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 846.338628] env[62096]: created_port_ids = self._update_ports_for_instance( [ 846.338628] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 846.338628] env[62096]: with excutils.save_and_reraise_exception(): [ 846.338628] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 846.338628] env[62096]: self.force_reraise() [ 846.338628] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 846.338628] env[62096]: raise self.value [ 846.338628] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 846.338628] env[62096]: updated_port = self._update_port( [ 846.338628] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 846.338628] env[62096]: _ensure_no_port_binding_failure(port) [ 846.338628] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 846.338628] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 846.339363] env[62096]: nova.exception.PortBindingFailed: Binding failed for port b6fb4595-6c02-4377-916c-a91aaa951bfb, please check neutron logs for more information. [ 846.339363] env[62096]: Removing descriptor: 14 [ 846.501432] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.833s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.502183] env[62096]: ERROR nova.compute.manager [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 29221571-5562-44ee-b638-0096025485b7, please check neutron logs for more information. 
[ 846.502183] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] Traceback (most recent call last): [ 846.502183] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 846.502183] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] self.driver.spawn(context, instance, image_meta, [ 846.502183] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 846.502183] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 846.502183] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 846.502183] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] vm_ref = self.build_virtual_machine(instance, [ 846.502183] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 846.502183] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] vif_infos = vmwarevif.get_vif_info(self._session, [ 846.502183] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 846.502523] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] for vif in network_info: [ 846.502523] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 846.502523] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] return self._sync_wrapper(fn, *args, **kwargs) [ 846.502523] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 846.502523] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] self.wait() [ 846.502523] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 846.502523] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] self[:] = self._gt.wait() [ 846.502523] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 846.502523] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] return self._exit_event.wait() [ 846.502523] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 846.502523] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] result = hub.switch() [ 846.502523] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
846.502523] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] return self.greenlet.switch() [ 846.502893] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 846.502893] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] result = function(*args, **kwargs) [ 846.502893] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 846.502893] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] return func(*args, **kwargs) [ 846.502893] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 846.502893] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] raise e [ 846.502893] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 846.502893] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] nwinfo = self.network_api.allocate_for_instance( [ 846.502893] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 846.502893] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] created_port_ids = self._update_ports_for_instance( [ 846.502893] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 846.502893] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] with excutils.save_and_reraise_exception(): [ 846.502893] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 846.503271] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] self.force_reraise() [ 846.503271] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 846.503271] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] raise self.value [ 846.503271] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 846.503271] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] updated_port = self._update_port( [ 846.503271] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 846.503271] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] _ensure_no_port_binding_failure(port) [ 846.503271] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 846.503271] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] raise exception.PortBindingFailed(port_id=port['id']) [ 846.503271] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] nova.exception.PortBindingFailed: Binding failed for port 29221571-5562-44ee-b638-0096025485b7, please check neutron logs for more information. [ 846.503271] env[62096]: ERROR nova.compute.manager [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] [ 846.503589] env[62096]: DEBUG nova.compute.utils [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] Binding failed for port 29221571-5562-44ee-b638-0096025485b7, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 846.504241] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.108s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 846.506047] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Expecting reply to msg 66a6e5772b024441bbe025d8146a4410 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 846.507277] env[62096]: DEBUG nova.compute.manager [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] Build of instance 46232e88-ad63-42bc-bf51-2a0758e6ec3a was re-scheduled: Binding failed for port 29221571-5562-44ee-b638-0096025485b7, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 846.507706] env[62096]: DEBUG nova.compute.manager [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 846.507929] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Acquiring lock "refresh_cache-46232e88-ad63-42bc-bf51-2a0758e6ec3a" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 846.508088] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Acquired lock "refresh_cache-46232e88-ad63-42bc-bf51-2a0758e6ec3a" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.508252] env[62096]: DEBUG nova.network.neutron [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 846.508607] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Expecting reply to msg 9ca32ac76db246018f4e2141cc201a32 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 846.515142] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9ca32ac76db246018f4e2141cc201a32 [ 846.527526] env[62096]: DEBUG oslo_vmware.api [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]527d87ff-53b5-02d1-e36e-c717511ba046, 'name': SearchDatastore_Task, 'duration_secs': 0.008363} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.527754] env[62096]: DEBUG oslo_concurrency.lockutils [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Releasing lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 846.527993] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk to [datastore2] 250ef7e7-266b-451d-8627-9cce211d4e83/250ef7e7-266b-451d-8627-9cce211d4e83.vmdk {{(pid=62096) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 846.528257] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e8e76338-add9-4e4b-8516-7d2f421fb2f8 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.540703] env[62096]: DEBUG oslo_vmware.api [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Waiting for the task: (returnval){ [ 846.540703] env[62096]: value = "task-397394" [ 846.540703] env[62096]: _type = "Task" [ 846.540703] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.544081] env[62096]: DEBUG oslo_vmware.api [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Task: {'id': task-397394, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.544081] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 66a6e5772b024441bbe025d8146a4410 [ 846.691332] env[62096]: DEBUG nova.compute.manager [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] Start spawning the instance on the hypervisor. {{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 846.708264] env[62096]: DEBUG nova.network.neutron [req-a5067c6b-1d88-438a-9f1f-559e171698fd req-1d56cab2-4f33-455d-a2e3-2f7fc36735ff service nova] [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 846.718473] env[62096]: DEBUG nova.virt.hardware [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 846.718740] env[62096]: DEBUG nova.virt.hardware [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 846.718896] env[62096]: DEBUG nova.virt.hardware [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 846.719074] env[62096]: DEBUG nova.virt.hardware [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 846.719235] env[62096]: DEBUG nova.virt.hardware [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 846.719389] env[62096]: DEBUG nova.virt.hardware [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 846.719610] env[62096]: DEBUG nova.virt.hardware [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 846.719851] env[62096]: DEBUG nova.virt.hardware [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 846.720089] env[62096]: DEBUG nova.virt.hardware [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 846.720243] env[62096]: DEBUG nova.virt.hardware [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 846.720491] env[62096]: DEBUG nova.virt.hardware [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 846.721439] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f470216-2286-4c64-93e4-b3d2e8bcf410 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.729812] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f30a731e-4e06-418c-96ff-79d1a04519f8 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.744707] env[62096]: ERROR nova.compute.manager [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b6fb4595-6c02-4377-916c-a91aaa951bfb, please check neutron logs for more information. 
[ 846.744707] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] Traceback (most recent call last): [ 846.744707] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 846.744707] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] yield resources [ 846.744707] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 846.744707] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] self.driver.spawn(context, instance, image_meta, [ 846.744707] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 846.744707] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] self._vmops.spawn(context, instance, image_meta, injected_files, [ 846.744707] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 846.744707] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] vm_ref = self.build_virtual_machine(instance, [ 846.744707] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 846.745106] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] vif_infos = vmwarevif.get_vif_info(self._session, [ 846.745106] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 846.745106] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] for vif in network_info: [ 846.745106] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 846.745106] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] return self._sync_wrapper(fn, *args, **kwargs) [ 846.745106] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 846.745106] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] self.wait() [ 846.745106] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 846.745106] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] self[:] = self._gt.wait() [ 846.745106] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 846.745106] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] return self._exit_event.wait() [ 846.745106] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 846.745106] env[62096]: ERROR 
nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] current.throw(*self._exc) [ 846.745452] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 846.745452] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] result = function(*args, **kwargs) [ 846.745452] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 846.745452] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] return func(*args, **kwargs) [ 846.745452] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 846.745452] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] raise e [ 846.745452] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 846.745452] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] nwinfo = self.network_api.allocate_for_instance( [ 846.745452] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 846.745452] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] created_port_ids = self._update_ports_for_instance( [ 846.745452] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 846.745452] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] with excutils.save_and_reraise_exception(): [ 846.745452] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 846.745859] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] self.force_reraise() [ 846.745859] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 846.745859] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] raise self.value [ 846.745859] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 846.745859] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] updated_port = self._update_port( [ 846.745859] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 846.745859] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] _ensure_no_port_binding_failure(port) [ 846.745859] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
846.745859] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] raise exception.PortBindingFailed(port_id=port['id']) [ 846.745859] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] nova.exception.PortBindingFailed: Binding failed for port b6fb4595-6c02-4377-916c-a91aaa951bfb, please check neutron logs for more information. [ 846.745859] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] [ 846.745859] env[62096]: INFO nova.compute.manager [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] Terminating instance [ 846.748219] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Acquiring lock "refresh_cache-fcf84c67-fdea-41d7-aed9-690a45c97eaa" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 846.802840] env[62096]: DEBUG nova.network.neutron [req-a5067c6b-1d88-438a-9f1f-559e171698fd req-1d56cab2-4f33-455d-a2e3-2f7fc36735ff service nova] [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.803381] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-a5067c6b-1d88-438a-9f1f-559e171698fd req-1d56cab2-4f33-455d-a2e3-2f7fc36735ff service nova] Expecting reply to msg 7399e877826e4675a01eea643c3e2d56 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 846.816262] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7399e877826e4675a01eea643c3e2d56 [ 847.033530] env[62096]: DEBUG nova.network.neutron [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 847.045944] env[62096]: DEBUG oslo_vmware.api [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Task: {'id': task-397394, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.119245] env[62096]: DEBUG nova.network.neutron [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 847.119742] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Expecting reply to msg 44b7cd7185f64b4e907de3f7fd0d92d8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 847.128776] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 44b7cd7185f64b4e907de3f7fd0d92d8 [ 847.264641] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35d83877-a0f5-4506-a4cc-d3cc419f7e39 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.272583] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2513c59c-d271-4f08-aaee-baf7c9d00f0b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.302592] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2c94da2-175f-43c7-98e7-68d3f38426e5 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.306996] env[62096]: DEBUG oslo_concurrency.lockutils [req-a5067c6b-1d88-438a-9f1f-559e171698fd req-1d56cab2-4f33-455d-a2e3-2f7fc36735ff service nova] Releasing lock "refresh_cache-fcf84c67-fdea-41d7-aed9-690a45c97eaa" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 847.310010] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Acquired lock "refresh_cache-fcf84c67-fdea-41d7-aed9-690a45c97eaa" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.310237] env[62096]: DEBUG nova.network.neutron [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 847.310685] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Expecting reply to msg bae1c02b8ea44dc4883279e85061871b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 847.312641] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39c3bd24-2cf1-4b42-8c80-1256fced3faa {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.318216] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
bae1c02b8ea44dc4883279e85061871b [ 847.326535] env[62096]: DEBUG nova.compute.provider_tree [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 847.327087] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Expecting reply to msg f8c1a8c3d62c4cdfbf1e4b370c74aacd in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 847.337396] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f8c1a8c3d62c4cdfbf1e4b370c74aacd [ 847.545849] env[62096]: DEBUG oslo_vmware.api [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Task: {'id': task-397394, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.529463} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.546216] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk to [datastore2] 250ef7e7-266b-451d-8627-9cce211d4e83/250ef7e7-266b-451d-8627-9cce211d4e83.vmdk {{(pid=62096) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 847.546534] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Extending root virtual disk to 1048576 {{(pid=62096) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 847.546832] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-48d4902d-708e-42a7-8b57-25fc3b9d98c8 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.552322] env[62096]: DEBUG oslo_vmware.api [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Waiting for the task: (returnval){ [ 847.552322] env[62096]: value = "task-397395" [ 847.552322] env[62096]: _type = "Task" [ 847.552322] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.559284] env[62096]: DEBUG oslo_vmware.api [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Task: {'id': task-397395, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.621540] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Releasing lock "refresh_cache-46232e88-ad63-42bc-bf51-2a0758e6ec3a" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 847.621859] env[62096]: DEBUG nova.compute.manager [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 847.622112] env[62096]: DEBUG nova.compute.manager [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 847.622360] env[62096]: DEBUG nova.network.neutron [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 847.645108] env[62096]: DEBUG nova.network.neutron [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 847.645727] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Expecting reply to msg 12ff26dec31a4d5cbdb1b88a89dc9201 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 847.652493] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 12ff26dec31a4d5cbdb1b88a89dc9201 [ 847.835311] env[62096]: DEBUG nova.network.neutron [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 847.851934] env[62096]: ERROR nova.scheduler.client.report [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] [req-4db55fa4-8a2d-40ea-a079-cad9de286df2] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 6eefe13c-ab55-4c03-987f-47a62756c3b3. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-4db55fa4-8a2d-40ea-a079-cad9de286df2"}]}: nova.exception.PortBindingFailed: Binding failed for port 33af3186-72fb-4c89-981b-e53007b95853, please check neutron logs for more information. [ 847.873139] env[62096]: DEBUG nova.scheduler.client.report [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Refreshing inventories for resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 847.886419] env[62096]: DEBUG nova.scheduler.client.report [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Updating ProviderTree inventory for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 847.886666] env[62096]: DEBUG nova.compute.provider_tree [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 847.899396] env[62096]: DEBUG nova.scheduler.client.report [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Refreshing aggregate associations for resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3, aggregates: None {{(pid=62096) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 847.921091] env[62096]: DEBUG nova.scheduler.client.report [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Refreshing trait associations for resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62096) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 847.943071] env[62096]: DEBUG nova.network.neutron [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] Updating instance_info_cache with network_info: [] 
{{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 847.947071] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Expecting reply to msg a104b7767aef4b54843a7c6626574860 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 847.956396] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a104b7767aef4b54843a7c6626574860 [ 848.066897] env[62096]: DEBUG oslo_vmware.api [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Task: {'id': task-397395, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.11155} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.067607] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Extended root virtual disk {{(pid=62096) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 848.068532] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27105e1e-8189-4eb1-bc24-fddcfb6edfa4 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.096494] env[62096]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Reconfiguring VM instance instance-00000042 to attach disk [datastore2] 250ef7e7-266b-451d-8627-9cce211d4e83/250ef7e7-266b-451d-8627-9cce211d4e83.vmdk or device None with type sparse {{(pid=62096) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 848.099528] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-83a0e7e7-f231-463f-89fa-aea97bb4bd8b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.125957] env[62096]: DEBUG oslo_vmware.api [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Waiting for the task: (returnval){ [ 848.125957] env[62096]: value = "task-397396" [ 848.125957] env[62096]: _type = "Task" [ 848.125957] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.135160] env[62096]: DEBUG oslo_vmware.api [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Task: {'id': task-397396, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.148369] env[62096]: DEBUG nova.network.neutron [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.148864] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Expecting reply to msg cc66e7ab912f42afaae1f787f29abe8e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 848.157064] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cc66e7ab912f42afaae1f787f29abe8e [ 848.204468] env[62096]: DEBUG nova.compute.manager [req-3ceced5d-bbd3-45d1-a9a3-ab93b6aa40a4 req-8c9dbf93-0b79-4ea8-998c-8d4b48582fce service nova] [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] Received event network-vif-deleted-b6fb4595-6c02-4377-916c-a91aaa951bfb {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 848.216026] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5872ae5-94f2-45b1-9224-19ff5ec6ddf5 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.223147] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba120266-5b4f-42d1-b442-fe5d056960ba {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.253539] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f282b52-75c8-4670-8180-20f67f427d96 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.260512] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c019d5d7-dca2-40b4-94d5-c6d8c5e4ca02 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.275435] env[62096]: DEBUG nova.compute.provider_tree [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 848.275961] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Expecting reply to msg d9a2639038c245cca5717901fd011c1e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 848.283743] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
d9a2639038c245cca5717901fd011c1e [ 848.449909] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Releasing lock "refresh_cache-fcf84c67-fdea-41d7-aed9-690a45c97eaa" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 848.450435] env[62096]: DEBUG nova.compute.manager [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 848.450639] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 848.450940] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f226367e-2add-45e8-8e01-dbc089670c30 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.459573] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffbeaa6c-9348-4ee4-8e19-c00f0b345978 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.480815] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance fcf84c67-fdea-41d7-aed9-690a45c97eaa could not be found. [ 848.481024] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 848.481205] env[62096]: INFO nova.compute.manager [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] Took 0.03 seconds to destroy the instance on the hypervisor. [ 848.481443] env[62096]: DEBUG oslo.service.loopingcall [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 848.481666] env[62096]: DEBUG nova.compute.manager [-] [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 848.481761] env[62096]: DEBUG nova.network.neutron [-] [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 848.496238] env[62096]: DEBUG nova.network.neutron [-] [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 848.496685] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 9d11dfd3d8174a4ea1d0b6868c0e22dd in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 848.503407] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9d11dfd3d8174a4ea1d0b6868c0e22dd [ 848.636455] env[62096]: DEBUG oslo_vmware.api [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Task: {'id': task-397396, 'name': ReconfigVM_Task, 'duration_secs': 0.266613} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.636728] env[62096]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Reconfigured VM instance instance-00000042 to attach disk [datastore2] 250ef7e7-266b-451d-8627-9cce211d4e83/250ef7e7-266b-451d-8627-9cce211d4e83.vmdk or device None with type sparse {{(pid=62096) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 848.637333] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-44307284-269a-4a77-a80d-96fb52c8a128 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.645030] env[62096]: DEBUG oslo_vmware.api [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Waiting for the task: (returnval){ [ 848.645030] env[62096]: value = "task-397397" [ 848.645030] env[62096]: _type = "Task" [ 848.645030] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.651412] env[62096]: INFO nova.compute.manager [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] [instance: 46232e88-ad63-42bc-bf51-2a0758e6ec3a] Took 1.03 seconds to deallocate network for instance. [ 848.653237] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Expecting reply to msg 47e07d2717db44bb8166e99c15259bb5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 848.658141] env[62096]: DEBUG oslo_vmware.api [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Task: {'id': task-397397, 'name': Rename_Task} progress is 5%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.692207] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 47e07d2717db44bb8166e99c15259bb5 [ 848.805716] env[62096]: DEBUG nova.scheduler.client.report [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Updated inventory for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with generation 90 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:957}} [ 848.805982] env[62096]: DEBUG nova.compute.provider_tree [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Updating resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 generation from 90 to 91 during operation: update_inventory {{(pid=62096) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 848.806165] env[62096]: DEBUG nova.compute.provider_tree [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 848.808652] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Expecting reply to msg d2b16ca4c5f646deafdec28b6fb5bd66 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 848.819785] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d2b16ca4c5f646deafdec28b6fb5bd66 [ 848.998927] env[62096]: DEBUG nova.network.neutron [-] [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.999477] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 08de4124e90c4daba162783ded7e89fb in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 849.008284] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 08de4124e90c4daba162783ded7e89fb [ 849.154391] env[62096]: DEBUG oslo_vmware.api [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Task: {'id': task-397397, 'name': Rename_Task, 'duration_secs': 0.13791} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.154670] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Powering on the VM {{(pid=62096) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 849.154897] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0a37d68f-bff7-42ef-bd23-fac749c991aa {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.160976] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Expecting reply to msg feedd9bb4f8440518206918eba0b10b7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 849.163542] env[62096]: DEBUG oslo_vmware.api [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Waiting for the task: (returnval){ [ 849.163542] env[62096]: value = "task-397398" [ 849.163542] env[62096]: _type = "Task" [ 849.163542] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.171052] env[62096]: DEBUG oslo_vmware.api [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Task: {'id': task-397398, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.195477] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg feedd9bb4f8440518206918eba0b10b7 [ 849.311635] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.807s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 849.312220] env[62096]: ERROR nova.compute.manager [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 33af3186-72fb-4c89-981b-e53007b95853, please check neutron logs for more information. 
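[editor's note] The PortBindingFailed raised here (its traceback follows immediately below) originates in Nova's Neutron integration: after updating a port, Nova inspects the returned port's binding:vif_type and aborts the build if Neutron reported a failed binding. A minimal sketch of that check, using an illustrative port dict rather than Nova's real helpers in nova/network/neutron.py:

```python
# Illustrative sketch of the check behind PortBindingFailed; simplified
# stand-ins, not Nova's actual code.

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")


def ensure_no_port_binding_failure(port: dict) -> None:
    # Neutron marks a port it could not bind on the chosen host with
    # binding:vif_type = 'binding_failed'.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


try:
    ensure_no_port_binding_failure({
        'id': '33af3186-72fb-4c89-981b-e53007b95853',
        'binding:vif_type': 'binding_failed',
    })
except PortBindingFailed as exc:
    print(exc)
```

When this fires during spawn, the compute manager aborts the resource claim and re-schedules the instance, which is the abort_instance_claim / "was re-scheduled" sequence visible a little further down in this log.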
[ 849.312220] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] Traceback (most recent call last): [ 849.312220] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 849.312220] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] self.driver.spawn(context, instance, image_meta, [ 849.312220] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 849.312220] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] self._vmops.spawn(context, instance, image_meta, injected_files, [ 849.312220] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 849.312220] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] vm_ref = self.build_virtual_machine(instance, [ 849.312220] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 849.312220] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] vif_infos = vmwarevif.get_vif_info(self._session, [ 849.312220] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 849.312549] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] for vif in network_info: [ 849.312549] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 849.312549] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] return self._sync_wrapper(fn, *args, **kwargs) [ 849.312549] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 849.312549] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] self.wait() [ 849.312549] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 849.312549] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] self[:] = self._gt.wait() [ 849.312549] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 849.312549] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] return self._exit_event.wait() [ 849.312549] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 849.312549] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] result = hub.switch() [ 849.312549] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
849.312549] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] return self.greenlet.switch() [ 849.312859] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 849.312859] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] result = function(*args, **kwargs) [ 849.312859] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 849.312859] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] return func(*args, **kwargs) [ 849.312859] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 849.312859] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] raise e [ 849.312859] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 849.312859] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] nwinfo = self.network_api.allocate_for_instance( [ 849.312859] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 849.312859] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] created_port_ids = self._update_ports_for_instance( [ 849.312859] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 849.312859] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] with excutils.save_and_reraise_exception(): [ 849.312859] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 849.313166] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] self.force_reraise() [ 849.313166] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 849.313166] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] raise self.value [ 849.313166] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 849.313166] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] updated_port = self._update_port( [ 849.313166] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 849.313166] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] _ensure_no_port_binding_failure(port) [ 849.313166] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 849.313166] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] raise exception.PortBindingFailed(port_id=port['id']) [ 849.313166] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] nova.exception.PortBindingFailed: Binding failed for port 33af3186-72fb-4c89-981b-e53007b95853, please check neutron logs for more information. [ 849.313166] env[62096]: ERROR nova.compute.manager [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] [ 849.313423] env[62096]: DEBUG nova.compute.utils [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] Binding failed for port 33af3186-72fb-4c89-981b-e53007b95853, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 849.314397] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.619s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.316687] env[62096]: INFO nova.compute.claims [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 849.318497] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Expecting reply to msg b586fefe85234af3ac4b82a4d523f107 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 849.319797] env[62096]: DEBUG nova.compute.manager [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] Build of instance b54dd1f8-2e8d-446d-9145-d034664b7069 was re-scheduled: Binding failed for port 33af3186-72fb-4c89-981b-e53007b95853, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 849.320329] env[62096]: DEBUG nova.compute.manager [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 849.320608] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Acquiring lock "refresh_cache-b54dd1f8-2e8d-446d-9145-d034664b7069" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 849.320810] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Acquired lock "refresh_cache-b54dd1f8-2e8d-446d-9145-d034664b7069" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.321026] env[62096]: DEBUG nova.network.neutron [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 849.321489] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Expecting reply to msg 3323cdd8836b4ba9857a5fa1b08a23bc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 849.331341] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3323cdd8836b4ba9857a5fa1b08a23bc [ 849.351827] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b586fefe85234af3ac4b82a4d523f107 [ 849.501522] env[62096]: INFO nova.compute.manager [-] [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] Took 1.02 seconds to deallocate network for instance. [ 849.504027] env[62096]: DEBUG nova.compute.claims [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 849.504120] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.675468] env[62096]: DEBUG oslo_vmware.api [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Task: {'id': task-397398, 'name': PowerOnVM_Task, 'duration_secs': 0.393701} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.675684] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Powered on the VM {{(pid=62096) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 849.675876] env[62096]: INFO nova.compute.manager [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Took 5.34 seconds to spawn the instance on the hypervisor. [ 849.676058] env[62096]: DEBUG nova.compute.manager [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Checking state {{(pid=62096) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 849.676793] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13ea0294-4841-4927-91ad-cf40a394a4fb {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.684559] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Expecting reply to msg 10294b531b8b41bca4fc1a653ab1d777 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 849.686425] env[62096]: INFO nova.scheduler.client.report [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Deleted allocations for instance 46232e88-ad63-42bc-bf51-2a0758e6ec3a [ 849.693374] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Expecting reply to msg 0a021d7eb0e44ffc8c556c85142e054f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 849.705666] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0a021d7eb0e44ffc8c556c85142e054f [ 849.724564] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 10294b531b8b41bca4fc1a653ab1d777 [ 849.824639] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Expecting reply to msg f944e82c82664d688fd341f6269b9512 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 849.834268] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f944e82c82664d688fd341f6269b9512 [ 849.842390] env[62096]: DEBUG nova.network.neutron [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 849.915977] env[62096]: DEBUG nova.network.neutron [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.915977] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Expecting reply to msg c985c49edba84480a7ab8c221c3a35c7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 849.924101] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c985c49edba84480a7ab8c221c3a35c7 [ 850.198132] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e73da030-7e22-49e7-bda0-aec2ff9e56cf tempest-ServersTestMultiNic-570865357 tempest-ServersTestMultiNic-570865357-project-member] Lock "46232e88-ad63-42bc-bf51-2a0758e6ec3a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 141.076s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 850.200066] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 1249a04c1f164ae6b29b93f2713fb3f3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 850.201554] env[62096]: INFO nova.compute.manager [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Took 25.90 seconds to build instance. [ 850.201554] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Expecting reply to msg 906b9198ccc047b8835e4a4d58c80004 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 850.213650] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1249a04c1f164ae6b29b93f2713fb3f3 [ 850.216299] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 906b9198ccc047b8835e4a4d58c80004 [ 850.417246] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Releasing lock "refresh_cache-b54dd1f8-2e8d-446d-9145-d034664b7069" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 850.417477] env[62096]: DEBUG nova.compute.manager [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 850.417662] env[62096]: DEBUG nova.compute.manager [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 850.417827] env[62096]: DEBUG nova.network.neutron [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 850.431976] env[62096]: DEBUG nova.network.neutron [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 850.432623] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Expecting reply to msg 09feebb2b46943198dc44c1688e07c7a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 850.438755] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 09feebb2b46943198dc44c1688e07c7a [ 850.575477] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7e7fe39-1919-4df0-9e84-9b6cf7165f48 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.583005] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27e782a4-cb4c-4153-83b2-cbaa5b6eefe2 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.616599] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e9d36e7-41df-429b-a83d-4f7f76976eee {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.624621] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7fe602a-d635-49df-a46b-093d35024cb4 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.640965] env[62096]: DEBUG nova.compute.provider_tree [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 850.641605] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Expecting reply to msg aa9a61f1915a49888a1710e0eb12f7f9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 850.648787] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
aa9a61f1915a49888a1710e0eb12f7f9 [ 850.704396] env[62096]: DEBUG nova.compute.manager [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: aa3e06b6-89f8-425f-8950-30deb025a473] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 850.706310] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg fdb1ecd189434f54b50b3f9a0c289cea in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 850.708267] env[62096]: DEBUG oslo_concurrency.lockutils [None req-6f6a01ff-c14d-41bb-a04a-2330ef16cae3 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Lock "250ef7e7-266b-451d-8627-9cce211d4e83" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 120.571s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 850.709181] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Expecting reply to msg 0965388edad44a8d96555ad9e8869f45 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 850.726696] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0965388edad44a8d96555ad9e8869f45 [ 850.741807] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Expecting reply to msg 71223018e19d449ba181821db35310c6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 850.743052] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fdb1ecd189434f54b50b3f9a0c289cea [ 850.750104] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 71223018e19d449ba181821db35310c6 [ 850.934514] env[62096]: DEBUG nova.network.neutron [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 850.935049] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Expecting reply to msg 8ebf42d0f7e14fe2aad4d883ab20ddd0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 850.942972] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8ebf42d0f7e14fe2aad4d883ab20ddd0 [ 851.144491] env[62096]: DEBUG nova.scheduler.client.report [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 
1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 851.148057] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Expecting reply to msg 70ef89c577e24f069522a19e30c1b9a8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 851.159482] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 70ef89c577e24f069522a19e30c1b9a8 [ 851.218133] env[62096]: DEBUG nova.compute.manager [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 851.220080] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Expecting reply to msg ace7aa4dca2d4ae5b93c2d6b85a0242c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 851.232401] env[62096]: DEBUG oslo_concurrency.lockutils [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 851.245194] env[62096]: INFO nova.compute.manager [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Rebuilding instance [ 851.259010] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ace7aa4dca2d4ae5b93c2d6b85a0242c [ 851.294494] env[62096]: DEBUG nova.compute.manager [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Checking state {{(pid=62096) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 851.294494] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcecac05-972b-4923-8226-bb7c44998189 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.299597] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Expecting reply to msg bdfc69321de348639a87e7c046333e0f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 851.341001] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bdfc69321de348639a87e7c046333e0f [ 851.436976] env[62096]: INFO nova.compute.manager [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] [instance: b54dd1f8-2e8d-446d-9145-d034664b7069] Took 1.02 seconds to deallocate network for instance. 
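[editor's note] The earlier 409 with code placement.concurrent_update, followed by the inventory/aggregate/trait refresh and the provider generation moving from 90 to 91, is Placement's optimistic-concurrency scheme: every inventory write carries resource_provider_generation, and a write against a stale generation is rejected so the caller must re-read and retry. A rough sketch of that retry loop against the Placement HTTP API, assuming a requests.Session that already carries auth and microversion headers and a placeholder endpoint (this is not Nova's report client):

```python
import requests

PLACEMENT = "http://placement.example:8778"   # placeholder endpoint
PROVIDER = "6eefe13c-ab55-4c03-987f-47a62756c3b3"


def set_inventories(session: requests.Session, inventories: dict,
                    max_attempts: int = 3) -> dict:
    """PUT inventories, retrying while the provider generation is stale."""
    url = f"{PLACEMENT}/resource_providers/{PROVIDER}/inventories"
    for _ in range(max_attempts):
        # Re-read on every attempt to pick up the current generation.
        current = session.get(url).json()
        resp = session.put(url, json={
            "resource_provider_generation":
                current["resource_provider_generation"],
            "inventories": inventories,
        })
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # 409 placement.concurrent_update: another writer bumped the
        # generation between our GET and PUT; loop and try again.
    raise RuntimeError("gave up after repeated generation conflicts")
```

That is the shape of what the report client does here: the first PUT loses the race, the refresh picks up generation 90, and the second PUT lands and bumps the provider to generation 91.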
[ 851.438686] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Expecting reply to msg 3841cd7cf03f45b39e05bf431c54468f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 851.473110] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3841cd7cf03f45b39e05bf431c54468f [ 851.651002] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.337s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 851.651604] env[62096]: DEBUG nova.compute.manager [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] Start building networks asynchronously for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 851.653270] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Expecting reply to msg cf7a15b0ee474272827bd28bc4f16eb6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 851.658663] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.133s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 851.659364] env[62096]: INFO nova.compute.claims [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 851.660366] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Expecting reply to msg cbf98f294cb242fcaa4a2ff7e66cef4f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 851.686712] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cf7a15b0ee474272827bd28bc4f16eb6 [ 851.698879] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cbf98f294cb242fcaa4a2ff7e66cef4f [ 851.736000] env[62096]: DEBUG oslo_concurrency.lockutils [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 851.802786] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Powering off 
the VM {{(pid=62096) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 851.803419] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b1c45fa4-1b6e-402f-91e7-b45719236a0d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.810914] env[62096]: DEBUG oslo_vmware.api [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Waiting for the task: (returnval){ [ 851.810914] env[62096]: value = "task-397399" [ 851.810914] env[62096]: _type = "Task" [ 851.810914] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.818785] env[62096]: DEBUG oslo_vmware.api [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Task: {'id': task-397399, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.943142] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Expecting reply to msg c7450a32d9e24ce3a0e09b24e703206b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 851.974983] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c7450a32d9e24ce3a0e09b24e703206b [ 852.160608] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c01ebed5f8c3494aa7735c06983a7115 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 852.162977] env[62096]: DEBUG nova.compute.utils [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 852.163574] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Expecting reply to msg 30d280bc311b495aaa1ae9d98993878f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 852.167367] env[62096]: DEBUG nova.compute.manager [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 852.167573] env[62096]: DEBUG nova.network.neutron [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 852.170882] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Expecting reply to msg fdd1e6d3d5ce458cb6c223c0549e372d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 852.171852] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c01ebed5f8c3494aa7735c06983a7115 [ 852.175626] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 30d280bc311b495aaa1ae9d98993878f [ 852.178994] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fdd1e6d3d5ce458cb6c223c0549e372d [ 852.214735] env[62096]: DEBUG nova.policy [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fb798bd1a907449993f918ab6e3948c1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '275e408898834dbeb18e126f50fee061', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 852.321610] env[62096]: DEBUG oslo_vmware.api [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Task: {'id': task-397399, 'name': PowerOffVM_Task, 'duration_secs': 0.109974} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.321901] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Powered off the VM {{(pid=62096) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 852.322084] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 852.322844] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2964b6d3-5247-4bf1-8617-248e45515877 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.329737] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Unregistering the VM {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 852.329892] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a30c51e1-750d-4c1a-9700-fd3633509644 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.353884] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Unregistered the VM {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 852.354148] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Deleting contents of the VM from datastore datastore2 {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 852.354306] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Deleting the datastore file [datastore2] 250ef7e7-266b-451d-8627-9cce211d4e83 {{(pid=62096) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 852.354571] env[62096]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f6b70ef4-4d9d-409d-9b67-704afecb87d7 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.360706] env[62096]: DEBUG oslo_vmware.api [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Waiting for the task: (returnval){ [ 852.360706] env[62096]: value = "task-397401" [ 852.360706] env[62096]: _type = "Task" [ 852.360706] env[62096]: } to complete. 
{{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.368591] env[62096]: DEBUG oslo_vmware.api [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Task: {'id': task-397401, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.472145] env[62096]: INFO nova.scheduler.client.report [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Deleted allocations for instance b54dd1f8-2e8d-446d-9145-d034664b7069 [ 852.492054] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Expecting reply to msg 9961c4f430d14b1b965aba76b5c9a27d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 852.498495] env[62096]: DEBUG nova.network.neutron [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] Successfully created port: 58359077-282b-4662-a530-b190af2de477 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 852.512502] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9961c4f430d14b1b965aba76b5c9a27d [ 852.670306] env[62096]: DEBUG nova.compute.manager [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] Start building block device mappings for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 852.672095] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Expecting reply to msg cd1cd45a3d8043a5a98ad3f379af21b7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 852.711584] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cd1cd45a3d8043a5a98ad3f379af21b7 [ 852.870573] env[62096]: DEBUG oslo_vmware.api [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Task: {'id': task-397401, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.089543} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.870838] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Deleted the datastore file {{(pid=62096) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 852.871023] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Deleted contents of the VM from datastore datastore2 {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 852.871208] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 852.872788] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Expecting reply to msg c05a469a3afc4fa096dba0f20778615e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 852.911667] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e73af65-3673-4131-9a03-0629980d3e00 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.914744] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c05a469a3afc4fa096dba0f20778615e [ 852.920827] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c876406-21df-4116-b4e5-8861c97cbde8 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.952673] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4b67e2e-c41b-4cda-9839-4ebd5464b3e9 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.960644] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cb94d18-3f10-45f5-8bd3-37f0359bd6a4 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.977029] env[62096]: DEBUG nova.compute.provider_tree [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 852.977796] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 
tempest-ServerShowV254Test-1158686086-project-member] Expecting reply to msg 9e07ea8ab7414788ad227b9c6ce7d68c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 852.986785] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9e07ea8ab7414788ad227b9c6ce7d68c [ 852.995472] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4da4b970-cc40-4c6a-8873-d7e30a4b4978 tempest-ServerMetadataTestJSON-1279504626 tempest-ServerMetadataTestJSON-1279504626-project-member] Lock "b54dd1f8-2e8d-446d-9145-d034664b7069" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 137.709s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 852.995893] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Expecting reply to msg 5dac410cc3b24af3bbbffb22ef5ab363 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 853.006309] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5dac410cc3b24af3bbbffb22ef5ab363 [ 853.179474] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Expecting reply to msg 37e40ead53684c85b18a767adf1db4cf in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 853.210761] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 37e40ead53684c85b18a767adf1db4cf [ 853.277679] env[62096]: DEBUG nova.compute.manager [req-c25a0a33-2ad8-48b4-adc0-6de9b2dad5e2 req-94a64348-b922-4b79-a007-74f7705c8a19 service nova] [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] Received event network-changed-58359077-282b-4662-a530-b190af2de477 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 853.277875] env[62096]: DEBUG nova.compute.manager [req-c25a0a33-2ad8-48b4-adc0-6de9b2dad5e2 req-94a64348-b922-4b79-a007-74f7705c8a19 service nova] [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] Refreshing instance network info cache due to event network-changed-58359077-282b-4662-a530-b190af2de477. 
{{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 853.278130] env[62096]: DEBUG oslo_concurrency.lockutils [req-c25a0a33-2ad8-48b4-adc0-6de9b2dad5e2 req-94a64348-b922-4b79-a007-74f7705c8a19 service nova] Acquiring lock "refresh_cache-6cc2a2c1-613d-40bd-a375-424b84b66ac9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 853.278213] env[62096]: DEBUG oslo_concurrency.lockutils [req-c25a0a33-2ad8-48b4-adc0-6de9b2dad5e2 req-94a64348-b922-4b79-a007-74f7705c8a19 service nova] Acquired lock "refresh_cache-6cc2a2c1-613d-40bd-a375-424b84b66ac9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.278468] env[62096]: DEBUG nova.network.neutron [req-c25a0a33-2ad8-48b4-adc0-6de9b2dad5e2 req-94a64348-b922-4b79-a007-74f7705c8a19 service nova] [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] Refreshing network info cache for port 58359077-282b-4662-a530-b190af2de477 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 853.278886] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-c25a0a33-2ad8-48b4-adc0-6de9b2dad5e2 req-94a64348-b922-4b79-a007-74f7705c8a19 service nova] Expecting reply to msg 85b1a90c201742dfbd8f686293e70ac5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 853.286211] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 85b1a90c201742dfbd8f686293e70ac5 [ 853.379207] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Expecting reply to msg 59e3c7dd01af49b783bfe1593463516d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 853.408078] env[62096]: ERROR nova.compute.manager [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 58359077-282b-4662-a530-b190af2de477, please check neutron logs for more information. 
[ 853.408078] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 853.408078] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 853.408078] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 853.408078] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 853.408078] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 853.408078] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 853.408078] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 853.408078] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 853.408078] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 853.408078] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 853.408078] env[62096]: ERROR nova.compute.manager raise self.value [ 853.408078] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 853.408078] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 853.408078] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 853.408078] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 853.408550] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 853.408550] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 853.408550] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 58359077-282b-4662-a530-b190af2de477, please check neutron logs for more information. 
[ 853.408550] env[62096]: ERROR nova.compute.manager [ 853.408550] env[62096]: Traceback (most recent call last): [ 853.408550] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 853.408550] env[62096]: listener.cb(fileno) [ 853.408550] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 853.408550] env[62096]: result = function(*args, **kwargs) [ 853.408550] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 853.408550] env[62096]: return func(*args, **kwargs) [ 853.408550] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 853.408550] env[62096]: raise e [ 853.408550] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 853.408550] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 853.408550] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 853.408550] env[62096]: created_port_ids = self._update_ports_for_instance( [ 853.408550] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 853.408550] env[62096]: with excutils.save_and_reraise_exception(): [ 853.408550] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 853.408550] env[62096]: self.force_reraise() [ 853.408550] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 853.408550] env[62096]: raise self.value [ 853.408550] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 853.408550] env[62096]: updated_port = self._update_port( [ 853.408550] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 853.408550] env[62096]: _ensure_no_port_binding_failure(port) [ 853.408550] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 853.408550] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 853.409295] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 58359077-282b-4662-a530-b190af2de477, please check neutron logs for more information. [ 853.409295] env[62096]: Removing descriptor: 14 [ 853.424185] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 59e3c7dd01af49b783bfe1593463516d [ 853.498317] env[62096]: DEBUG nova.compute.manager [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] Starting instance... 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 853.500151] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Expecting reply to msg 2fb6db3f001941dc84a3cf16694d5084 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 853.511298] env[62096]: DEBUG nova.scheduler.client.report [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Updated inventory for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with generation 91 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:957}} [ 853.511540] env[62096]: DEBUG nova.compute.provider_tree [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Updating resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 generation from 91 to 92 during operation: update_inventory {{(pid=62096) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 853.511715] env[62096]: DEBUG nova.compute.provider_tree [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 853.514171] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Expecting reply to msg bb35d9e96b1c4b1eadeaf8e05c2b1d4f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 853.527145] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bb35d9e96b1c4b1eadeaf8e05c2b1d4f [ 853.532799] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2fb6db3f001941dc84a3cf16694d5084 [ 853.682738] env[62096]: DEBUG nova.compute.manager [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 853.707847] env[62096]: DEBUG nova.virt.hardware [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 853.708194] env[62096]: DEBUG nova.virt.hardware [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 853.708387] env[62096]: DEBUG nova.virt.hardware [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 853.708579] env[62096]: DEBUG nova.virt.hardware [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 853.708833] env[62096]: DEBUG nova.virt.hardware [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 853.709001] env[62096]: DEBUG nova.virt.hardware [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 853.709209] env[62096]: DEBUG nova.virt.hardware [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 853.709368] env[62096]: DEBUG nova.virt.hardware [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 853.709529] env[62096]: DEBUG nova.virt.hardware [None 
req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 853.709689] env[62096]: DEBUG nova.virt.hardware [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 853.709858] env[62096]: DEBUG nova.virt.hardware [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 853.710703] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a63d7bc4-c570-4583-abd6-cfdb421068bc {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.718670] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4ae8a6f-d570-40c6-8c77-024b97487363 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.732404] env[62096]: ERROR nova.compute.manager [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 58359077-282b-4662-a530-b190af2de477, please check neutron logs for more information. 
[ 853.732404] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] Traceback (most recent call last): [ 853.732404] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 853.732404] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] yield resources [ 853.732404] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 853.732404] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] self.driver.spawn(context, instance, image_meta, [ 853.732404] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 853.732404] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 853.732404] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 853.732404] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] vm_ref = self.build_virtual_machine(instance, [ 853.732404] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 853.732803] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] vif_infos = vmwarevif.get_vif_info(self._session, [ 853.732803] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 853.732803] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] for vif in network_info: [ 853.732803] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 853.732803] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] return self._sync_wrapper(fn, *args, **kwargs) [ 853.732803] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 853.732803] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] self.wait() [ 853.732803] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 853.732803] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] self[:] = self._gt.wait() [ 853.732803] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 853.732803] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] return self._exit_event.wait() [ 853.732803] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 853.732803] env[62096]: ERROR 
nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] current.throw(*self._exc) [ 853.733186] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 853.733186] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] result = function(*args, **kwargs) [ 853.733186] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 853.733186] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] return func(*args, **kwargs) [ 853.733186] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 853.733186] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] raise e [ 853.733186] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 853.733186] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] nwinfo = self.network_api.allocate_for_instance( [ 853.733186] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 853.733186] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] created_port_ids = self._update_ports_for_instance( [ 853.733186] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 853.733186] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] with excutils.save_and_reraise_exception(): [ 853.733186] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 853.733572] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] self.force_reraise() [ 853.733572] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 853.733572] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] raise self.value [ 853.733572] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 853.733572] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] updated_port = self._update_port( [ 853.733572] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 853.733572] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] _ensure_no_port_binding_failure(port) [ 853.733572] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
853.733572] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] raise exception.PortBindingFailed(port_id=port['id']) [ 853.733572] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] nova.exception.PortBindingFailed: Binding failed for port 58359077-282b-4662-a530-b190af2de477, please check neutron logs for more information. [ 853.733572] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] [ 853.733572] env[62096]: INFO nova.compute.manager [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] Terminating instance [ 853.734756] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Acquiring lock "refresh_cache-6cc2a2c1-613d-40bd-a375-424b84b66ac9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 853.798362] env[62096]: DEBUG nova.network.neutron [req-c25a0a33-2ad8-48b4-adc0-6de9b2dad5e2 req-94a64348-b922-4b79-a007-74f7705c8a19 service nova] [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 853.879335] env[62096]: DEBUG nova.network.neutron [req-c25a0a33-2ad8-48b4-adc0-6de9b2dad5e2 req-94a64348-b922-4b79-a007-74f7705c8a19 service nova] [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.879798] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-c25a0a33-2ad8-48b4-adc0-6de9b2dad5e2 req-94a64348-b922-4b79-a007-74f7705c8a19 service nova] Expecting reply to msg 79f9266bd8364aa2a8605a2b3104a7f9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 853.891538] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 79f9266bd8364aa2a8605a2b3104a7f9 [ 853.901885] env[62096]: DEBUG nova.virt.hardware [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 853.902132] env[62096]: DEBUG nova.virt.hardware [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:348}} [ 853.902277] env[62096]: DEBUG nova.virt.hardware [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 853.902456] env[62096]: DEBUG nova.virt.hardware [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 853.902598] env[62096]: DEBUG nova.virt.hardware [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 853.902744] env[62096]: DEBUG nova.virt.hardware [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 853.902941] env[62096]: DEBUG nova.virt.hardware [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 853.903098] env[62096]: DEBUG nova.virt.hardware [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 853.903261] env[62096]: DEBUG nova.virt.hardware [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 853.903417] env[62096]: DEBUG nova.virt.hardware [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 853.903582] env[62096]: DEBUG nova.virt.hardware [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 853.904675] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0401c1f-9293-420a-8a25-b054b71e61e7 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.912857] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a29f3cb-c856-4072-89b3-af6c60052e52 {{(pid=62096) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.925857] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Instance VIF info [] {{(pid=62096) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 853.931334] env[62096]: DEBUG oslo.service.loopingcall [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 853.931557] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Creating VM on the ESX host {{(pid=62096) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 853.931755] env[62096]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-44fc813a-2fa3-4016-97ab-683ade9a2bd9 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.951403] env[62096]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 853.951403] env[62096]: value = "task-397402" [ 853.951403] env[62096]: _type = "Task" [ 853.951403] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.961138] env[62096]: DEBUG oslo_vmware.api [-] Task: {'id': task-397402, 'name': CreateVM_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.016757] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.362s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.017491] env[62096]: DEBUG nova.compute.manager [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 854.019503] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Expecting reply to msg 84c68811f61b4dc895983d1b168336c0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 854.020665] env[62096]: DEBUG oslo_concurrency.lockutils [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.836s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.022702] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg e00e7b91891840a88f26c90a88544774 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 854.024644] env[62096]: DEBUG oslo_concurrency.lockutils [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 854.060385] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 84c68811f61b4dc895983d1b168336c0 [ 854.079214] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e00e7b91891840a88f26c90a88544774 [ 854.383875] env[62096]: DEBUG oslo_concurrency.lockutils [req-c25a0a33-2ad8-48b4-adc0-6de9b2dad5e2 req-94a64348-b922-4b79-a007-74f7705c8a19 service nova] Releasing lock "refresh_cache-6cc2a2c1-613d-40bd-a375-424b84b66ac9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 854.384371] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Acquired lock "refresh_cache-6cc2a2c1-613d-40bd-a375-424b84b66ac9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.384555] env[62096]: DEBUG nova.network.neutron [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 854.385032] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Expecting reply to msg 5f80fd1eee1b438b964d6f5eaab47f28 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 854.392246] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5f80fd1eee1b438b964d6f5eaab47f28 [ 854.460196] env[62096]: DEBUG oslo_vmware.api [-] Task: {'id': task-397402, 'name': CreateVM_Task, 'duration_secs': 0.272326} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.460367] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Created VM on the ESX host {{(pid=62096) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 854.460778] env[62096]: DEBUG oslo_concurrency.lockutils [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 854.460933] env[62096]: DEBUG oslo_concurrency.lockutils [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Acquired lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.461269] env[62096]: DEBUG oslo_concurrency.lockutils [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 854.461556] env[62096]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70eee4a8-e71e-4046-a33a-b27ae772a71a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.465716] env[62096]: DEBUG oslo_vmware.api [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Waiting for the task: (returnval){ [ 854.465716] env[62096]: value = "session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]529497e8-5a68-3a65-efca-7a1bf73374f4" [ 854.465716] env[62096]: _type = "Task" [ 854.465716] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.473593] env[62096]: DEBUG oslo_vmware.api [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]529497e8-5a68-3a65-efca-7a1bf73374f4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.531809] env[62096]: DEBUG nova.compute.utils [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 854.531809] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Expecting reply to msg 9fc04f7fa11c43a69ba96232a061a126 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 854.532149] env[62096]: DEBUG nova.compute.manager [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Not allocating networking since 'none' was specified. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 854.574497] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9fc04f7fa11c43a69ba96232a061a126 [ 854.759264] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 854.759507] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 854.760161] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 56c223ede51d4e318e3cb7c1834c1f89 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 854.775129] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 56c223ede51d4e318e3cb7c1834c1f89 [ 854.798770] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34fd1721-e2de-491c-b17f-f13c1ca187f1 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.810014] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d1cb0c5-c42b-4454-a7ca-772a7dc69235 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.844892] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ccab88c-f099-4ed5-839d-7df74255e781 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.852800] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18aa743a-35da-4ba0-b2ac-612682bcda4f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.868520] env[62096]: DEBUG nova.compute.provider_tree [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Inventory has not changed in ProviderTree for provider: 
6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 854.869026] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg 57db5be3cc504d008433fa225bb3118a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 854.882893] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 57db5be3cc504d008433fa225bb3118a [ 854.901085] env[62096]: DEBUG nova.network.neutron [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 854.976266] env[62096]: DEBUG oslo_vmware.api [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]529497e8-5a68-3a65-efca-7a1bf73374f4, 'name': SearchDatastore_Task, 'duration_secs': 0.010894} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.976968] env[62096]: DEBUG oslo_concurrency.lockutils [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Releasing lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 854.976968] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Processing image fb764baa-3805-45c4-a694-aa91b0932110 {{(pid=62096) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 854.978311] env[62096]: DEBUG oslo_concurrency.lockutils [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 854.978311] env[62096]: DEBUG oslo_concurrency.lockutils [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Acquired lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.978311] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62096) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 854.978311] env[62096]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-daa9b881-436b-4f79-add4-08b876ec382c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.985433] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62096) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 854.985625] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62096) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 854.987099] env[62096]: DEBUG nova.network.neutron [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.987760] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Expecting reply to msg bc47752746304f2ba7aff71ac0d7f4c5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 854.988947] env[62096]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef467baa-0e48-4749-89de-bf0273607def {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.996843] env[62096]: DEBUG oslo_vmware.api [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Waiting for the task: (returnval){ [ 854.996843] env[62096]: value = "session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]5298a6d2-478f-7755-6500-82f93647b314" [ 854.996843] env[62096]: _type = "Task" [ 854.996843] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.006029] env[62096]: DEBUG oslo_vmware.api [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]5298a6d2-478f-7755-6500-82f93647b314, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.007048] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bc47752746304f2ba7aff71ac0d7f4c5 [ 855.032826] env[62096]: DEBUG nova.compute.manager [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 855.034829] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Expecting reply to msg 7cf9576da13b4cb3b42cd6c28c3d581f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 855.070721] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7cf9576da13b4cb3b42cd6c28c3d581f [ 855.272112] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 855.272276] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Starting heal instance info cache {{(pid=62096) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9926}} [ 855.272413] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Rebuilding the list of instances to heal {{(pid=62096) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 855.272936] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 205a5fb4243941a782eccf4cf84faeaa in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 855.288783] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 205a5fb4243941a782eccf4cf84faeaa [ 855.300562] env[62096]: DEBUG nova.compute.manager [req-7a7c0a68-f67b-473d-ba92-2993a4a5ffd1 req-aca3529a-6d1e-441a-9c10-2067c6c45e0d service nova] [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] Received event network-vif-deleted-58359077-282b-4662-a530-b190af2de477 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 855.372030] env[62096]: DEBUG nova.scheduler.client.report [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 855.374300] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg e2745c4b7a414487a83aeddad7bd13f2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 855.386994] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e2745c4b7a414487a83aeddad7bd13f2 [ 855.495392] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Releasing lock "refresh_cache-6cc2a2c1-613d-40bd-a375-424b84b66ac9" {{(pid=62096) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 855.495805] env[62096]: DEBUG nova.compute.manager [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 855.495998] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 855.496321] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-52895707-a20a-4131-8a30-44afa8f03c75 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.507219] env[62096]: DEBUG oslo_vmware.api [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]5298a6d2-478f-7755-6500-82f93647b314, 'name': SearchDatastore_Task, 'duration_secs': 0.008612} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.508672] env[62096]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0372fb42-664c-41b3-b4c1-70fd0a1f36a5 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.513220] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a052ec35-a499-4f72-8ae7-ec178a6e5307 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.526058] env[62096]: DEBUG oslo_vmware.api [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Waiting for the task: (returnval){ [ 855.526058] env[62096]: value = "session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]52d0715d-81fa-dd4e-5b56-462eef9a133d" [ 855.526058] env[62096]: _type = "Task" [ 855.526058] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.533882] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6cc2a2c1-613d-40bd-a375-424b84b66ac9 could not be found. 
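Note: the Acquiring/Acquired/Releasing lock records above around "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" come from oslo.concurrency. A minimal sketch of that pattern, assuming an in-process lock and a hypothetical critical-section callable (only the lock name is taken from the log):

    from oslo_concurrency import lockutils

    # Lock name copied from the records above; everything else is illustrative.
    CACHE_IMAGE_LOCK = ("[datastore2] devstack-image-cache_base/"
                        "fb764baa-3805-45c4-a694-aa91b0932110")

    def with_image_cache_lock(fn):
        # lockutils.lock() is a context manager that serializes access to the
        # cached image; it emits DEBUG records of the same shape as the
        # Acquiring/Acquired/Releasing lines in this log.
        with lockutils.lock(CACHE_IMAGE_LOCK, external=False):
            return fn()

    # Example (hypothetical work inside the critical section):
    # with_image_cache_lock(lambda: "search devstack-image-cache_base")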
[ 855.534168] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 855.534354] env[62096]: INFO nova.compute.manager [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] Took 0.04 seconds to destroy the instance on the hypervisor. [ 855.534580] env[62096]: DEBUG oslo.service.loopingcall [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 855.537399] env[62096]: DEBUG nova.compute.manager [-] [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 855.537504] env[62096]: DEBUG nova.network.neutron [-] [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 855.541242] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Expecting reply to msg da6bd242b6324b61ba0b3fc5ccefb265 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 855.542680] env[62096]: DEBUG oslo_vmware.api [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]52d0715d-81fa-dd4e-5b56-462eef9a133d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.557059] env[62096]: DEBUG nova.network.neutron [-] [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 855.557571] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 728a0cde36624889b7c08ae72b642896 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 855.564511] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 728a0cde36624889b7c08ae72b642896 [ 855.573264] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg da6bd242b6324b61ba0b3fc5ccefb265 [ 855.777517] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] Skipping network cache update for instance because it is Building. {{(pid=62096) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 855.777734] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] Skipping network cache update for instance because it is Building. 
{{(pid=62096) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 855.777911] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] Skipping network cache update for instance because it is Building. {{(pid=62096) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 855.778081] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] Skipping network cache update for instance because it is Building. {{(pid=62096) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 855.778241] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Skipping network cache update for instance because it is Building. {{(pid=62096) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 855.778501] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Acquiring lock "refresh_cache-250ef7e7-266b-451d-8627-9cce211d4e83" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 855.778654] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Acquired lock "refresh_cache-250ef7e7-266b-451d-8627-9cce211d4e83" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.778799] env[62096]: DEBUG nova.network.neutron [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Forcefully refreshing network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 855.779009] env[62096]: DEBUG nova.objects.instance [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Lazy-loading 'info_cache' on Instance uuid 250ef7e7-266b-451d-8627-9cce211d4e83 {{(pid=62096) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 855.779661] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 32be298171094adbbf292e4e9371a874 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 855.792241] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 32be298171094adbbf292e4e9371a874 [ 855.876787] env[62096]: DEBUG oslo_concurrency.lockutils [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.856s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.877415] env[62096]: ERROR nova.compute.manager [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 81da80c1-1e68-4313-8e6e-e7163fdac3d3, please check neutron logs for more information. 
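Note: the ERROR record above reports the build failing with PortBindingFailed for port 81da80c1-1e68-4313-8e6e-e7163fdac3d3, and the traceback that follows walks the call path down to _ensure_no_port_binding_failure in nova/network/neutron.py. A simplified, self-contained approximation of that check, assuming the usual Neutron convention that a failed binding is reported as binding:vif_type == "binding_failed" (the constant and exception class here are stand-ins, not Nova's own code):

    VIF_TYPE_BINDING_FAILED = "binding_failed"  # assumed Neutron marker value

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port: dict) -> None:
        # If Neutron could not bind the port, surface it as an exception so
        # the compute manager can abort the claim and reschedule the build,
        # which is what the records further below show happening.
        if port.get("binding:vif_type") == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port["id"])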
[ 855.877415] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] Traceback (most recent call last): [ 855.877415] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 855.877415] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] self.driver.spawn(context, instance, image_meta, [ 855.877415] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 855.877415] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] self._vmops.spawn(context, instance, image_meta, injected_files, [ 855.877415] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 855.877415] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] vm_ref = self.build_virtual_machine(instance, [ 855.877415] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 855.877415] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] vif_infos = vmwarevif.get_vif_info(self._session, [ 855.877415] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 855.877771] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] for vif in network_info: [ 855.877771] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 855.877771] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] return self._sync_wrapper(fn, *args, **kwargs) [ 855.877771] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 855.877771] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] self.wait() [ 855.877771] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 855.877771] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] self[:] = self._gt.wait() [ 855.877771] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 855.877771] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] return self._exit_event.wait() [ 855.877771] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 855.877771] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] current.throw(*self._exc) [ 855.877771] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
855.877771] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] result = function(*args, **kwargs) [ 855.878151] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 855.878151] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] return func(*args, **kwargs) [ 855.878151] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 855.878151] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] raise e [ 855.878151] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 855.878151] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] nwinfo = self.network_api.allocate_for_instance( [ 855.878151] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 855.878151] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] created_port_ids = self._update_ports_for_instance( [ 855.878151] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 855.878151] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] with excutils.save_and_reraise_exception(): [ 855.878151] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 855.878151] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] self.force_reraise() [ 855.878151] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 855.878519] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] raise self.value [ 855.878519] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 855.878519] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] updated_port = self._update_port( [ 855.878519] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 855.878519] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] _ensure_no_port_binding_failure(port) [ 855.878519] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 855.878519] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] raise exception.PortBindingFailed(port_id=port['id']) [ 855.878519] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] nova.exception.PortBindingFailed: Binding failed for 
port 81da80c1-1e68-4313-8e6e-e7163fdac3d3, please check neutron logs for more information. [ 855.878519] env[62096]: ERROR nova.compute.manager [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] [ 855.878519] env[62096]: DEBUG nova.compute.utils [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] Binding failed for port 81da80c1-1e68-4313-8e6e-e7163fdac3d3, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 855.879290] env[62096]: DEBUG oslo_concurrency.lockutils [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.950s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 855.881212] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg 06464e124dd047138483460f10e7c015 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 855.882530] env[62096]: DEBUG nova.compute.manager [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] Build of instance d5a6c627-bbab-49d4-a3bd-cb5b15264b18 was re-scheduled: Binding failed for port 81da80c1-1e68-4313-8e6e-e7163fdac3d3, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 855.882990] env[62096]: DEBUG nova.compute.manager [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 855.883229] env[62096]: DEBUG oslo_concurrency.lockutils [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Acquiring lock "refresh_cache-d5a6c627-bbab-49d4-a3bd-cb5b15264b18" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 855.883375] env[62096]: DEBUG oslo_concurrency.lockutils [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Acquired lock "refresh_cache-d5a6c627-bbab-49d4-a3bd-cb5b15264b18" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.883533] env[62096]: DEBUG nova.network.neutron [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 855.883887] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg 720f959a1135460eb787e79cf397b54c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 855.890409] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 720f959a1135460eb787e79cf397b54c [ 855.913351] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 06464e124dd047138483460f10e7c015 [ 856.036518] env[62096]: DEBUG oslo_vmware.api [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]52d0715d-81fa-dd4e-5b56-462eef9a133d, 'name': SearchDatastore_Task, 'duration_secs': 0.009815} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.036720] env[62096]: DEBUG oslo_concurrency.lockutils [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Releasing lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 856.036949] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk to [datastore2] 250ef7e7-266b-451d-8627-9cce211d4e83/250ef7e7-266b-451d-8627-9cce211d4e83.vmdk {{(pid=62096) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 856.037193] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1b0de34f-2deb-47f6-8d31-56e4a1e0021c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.042669] env[62096]: DEBUG oslo_vmware.api [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Waiting for the task: (returnval){ [ 856.042669] env[62096]: value = "task-397403" [ 856.042669] env[62096]: _type = "Task" [ 856.042669] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.043651] env[62096]: DEBUG nova.compute.manager [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Start spawning the instance on the hypervisor. {{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 856.053490] env[62096]: DEBUG oslo_vmware.api [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Task: {'id': task-397403, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.059846] env[62096]: DEBUG nova.network.neutron [-] [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 856.060271] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 78fb0626045a4a8392952648dd0fa26d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 856.073724] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 78fb0626045a4a8392952648dd0fa26d [ 856.079903] env[62096]: DEBUG nova.virt.hardware [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 856.080159] env[62096]: DEBUG nova.virt.hardware [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 856.080562] env[62096]: DEBUG nova.virt.hardware [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 856.080562] env[62096]: DEBUG nova.virt.hardware [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 856.080696] env[62096]: DEBUG nova.virt.hardware [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 856.080750] env[62096]: DEBUG nova.virt.hardware [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 856.080950] env[62096]: DEBUG nova.virt.hardware [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 856.081097] env[62096]: DEBUG nova.virt.hardware [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 856.081258] env[62096]: DEBUG nova.virt.hardware [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 856.081415] env[62096]: DEBUG nova.virt.hardware [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 856.081584] env[62096]: DEBUG nova.virt.hardware [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 856.082670] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfad6bc8-c7d9-49ed-a3f9-769f58b34334 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.090507] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f82ba53b-1436-4552-af83-1a737c6ec894 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.104090] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Instance VIF info [] {{(pid=62096) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 856.109809] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Creating folder: Project (8d66d135bbb44bf4ba61a2900da2cf9d). Parent ref: group-v107847. {{(pid=62096) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 856.110092] env[62096]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-33698ea7-ed35-4cae-888e-e7b047b4bf61 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.118541] env[62096]: INFO nova.virt.vmwareapi.vm_util [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Created folder: Project (8d66d135bbb44bf4ba61a2900da2cf9d) in parent group-v107847. [ 856.118720] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Creating folder: Instances. Parent ref: group-v107866. 
{{(pid=62096) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 856.118922] env[62096]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4d5abc69-ba8b-47c1-b0df-235822b93057 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.127053] env[62096]: INFO nova.virt.vmwareapi.vm_util [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Created folder: Instances in parent group-v107866. [ 856.127283] env[62096]: DEBUG oslo.service.loopingcall [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 856.127460] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Creating VM on the ESX host {{(pid=62096) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 856.128042] env[62096]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-da8dc783-9215-4f08-9146-762e6c7d988f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.143419] env[62096]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 856.143419] env[62096]: value = "task-397406" [ 856.143419] env[62096]: _type = "Task" [ 856.143419] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.152869] env[62096]: DEBUG oslo_vmware.api [-] Task: {'id': task-397406, 'name': CreateVM_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.283006] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 2cd18ed33a9547b791febc556f595a9d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 856.291505] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2cd18ed33a9547b791febc556f595a9d [ 856.408532] env[62096]: DEBUG nova.network.neutron [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 856.496596] env[62096]: DEBUG nova.network.neutron [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 856.497106] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg d9fd2eb02ee047488ae9c6da396c9444 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 856.505124] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d9fd2eb02ee047488ae9c6da396c9444 [ 856.553207] env[62096]: DEBUG oslo_vmware.api [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Task: {'id': task-397403, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.440468} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.553460] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk to [datastore2] 250ef7e7-266b-451d-8627-9cce211d4e83/250ef7e7-266b-451d-8627-9cce211d4e83.vmdk {{(pid=62096) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 856.553672] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Extending root virtual disk to 1048576 {{(pid=62096) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 856.553925] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5a2ea3a0-6bd4-49d8-b1ff-88ceaa18d5ee {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.561362] env[62096]: DEBUG oslo_vmware.api [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Waiting for the task: (returnval){ [ 856.561362] env[62096]: value = "task-397407" [ 856.561362] env[62096]: _type = "Task" [ 856.561362] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.563973] env[62096]: INFO nova.compute.manager [-] [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] Took 1.03 seconds to deallocate network for instance. 
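Note: the CopyVirtualDisk_Task / ExtendVirtualDisk_Task records above follow the wait_for_task pattern used throughout this log: the vSphere call returns a task reference and the session polls it ("progress is 0%" ... "completed successfully") until it reaches a terminal state. A generic sketch of such a polling loop, assuming a hypothetical poll() callable and TaskState enum rather than oslo.vmware's actual internals:

    import time
    from enum import Enum

    class TaskState(Enum):
        # Stand-ins for the vSphere task states.
        RUNNING = "running"
        SUCCESS = "success"
        ERROR = "error"

    def wait_for_task(poll, interval=0.5, timeout=60.0):
        # `poll` is a hypothetical callable returning (TaskState, progress_pct).
        deadline = time.monotonic() + timeout
        while True:
            state, progress = poll()
            print(f"progress is {progress}%")   # mirrors the polling DEBUG lines
            if state is TaskState.SUCCESS:
                return                          # "completed successfully"
            if state is TaskState.ERROR:
                raise RuntimeError("task reported an error")
            if time.monotonic() > deadline:
                raise TimeoutError("task did not complete in time")
            time.sleep(interval)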
[ 856.569676] env[62096]: DEBUG nova.compute.claims [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 856.569851] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 856.574768] env[62096]: DEBUG oslo_vmware.api [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Task: {'id': task-397407, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.631693] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-913ce45b-3859-46e2-bc46-620d5ec7db8c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.638956] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5033629-c7e4-431b-8037-e148afb44fec {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.670865] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-650ced47-c568-4015-a0c7-abefe64356a8 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.676647] env[62096]: DEBUG oslo_vmware.api [-] Task: {'id': task-397406, 'name': CreateVM_Task, 'duration_secs': 0.282686} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.677184] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Created VM on the ESX host {{(pid=62096) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 856.677681] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 856.677911] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Acquired lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.678272] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 856.680246] env[62096]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f21d668b-5860-438f-bc0b-a0c932654a13 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.682653] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2499dacc-3dfa-4e1a-b506-0a83d362a547 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.688656] env[62096]: DEBUG oslo_vmware.api [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Waiting for the task: (returnval){ [ 856.688656] env[62096]: value = "session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]52770ccd-2933-61fd-203c-86ec0f3d9ae5" [ 856.688656] env[62096]: _type = "Task" [ 856.688656] env[62096]: } to complete. 
{{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.696998] env[62096]: DEBUG nova.compute.provider_tree [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 856.697560] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg 17456ae87886463787b6b10eb7012386 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 856.706762] env[62096]: DEBUG oslo_vmware.api [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]52770ccd-2933-61fd-203c-86ec0f3d9ae5, 'name': SearchDatastore_Task, 'duration_secs': 0.01026} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.707400] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 17456ae87886463787b6b10eb7012386 [ 856.707878] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Releasing lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 856.708172] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Processing image fb764baa-3805-45c4-a694-aa91b0932110 {{(pid=62096) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 856.708456] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 856.708683] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Acquired lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.708924] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 
tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62096) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 856.709536] env[62096]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b8d99dce-6be7-4ce0-be1d-5d36bcf653a6 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.716354] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62096) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 856.716593] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62096) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 856.717279] env[62096]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cce47180-14e6-4f89-b822-b1b1c73d1b76 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.721487] env[62096]: DEBUG oslo_vmware.api [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Waiting for the task: (returnval){ [ 856.721487] env[62096]: value = "session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]521bb784-7901-e325-889d-529f1dd7d338" [ 856.721487] env[62096]: _type = "Task" [ 856.721487] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.728200] env[62096]: DEBUG oslo_vmware.api [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]521bb784-7901-e325-889d-529f1dd7d338, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.799539] env[62096]: DEBUG nova.network.neutron [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 856.799977] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg ef6867b313464a058b22cdfadcfb1859 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 856.809265] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ef6867b313464a058b22cdfadcfb1859 [ 856.998975] env[62096]: DEBUG oslo_concurrency.lockutils [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Releasing lock "refresh_cache-d5a6c627-bbab-49d4-a3bd-cb5b15264b18" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 856.999236] env[62096]: DEBUG nova.compute.manager [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 856.999423] env[62096]: DEBUG nova.compute.manager [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 856.999588] env[62096]: DEBUG nova.network.neutron [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 857.014083] env[62096]: DEBUG nova.network.neutron [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 857.014564] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg 11df979182d2473da7232a9b88398372 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 857.022164] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 11df979182d2473da7232a9b88398372 [ 857.073958] env[62096]: DEBUG oslo_vmware.api [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Task: {'id': task-397407, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061728} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.074238] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Extended root virtual disk {{(pid=62096) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 857.074977] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17ef2539-a64f-4fbb-8639-8ae8bf10e97a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.093349] env[62096]: DEBUG nova.virt.vmwareapi.volumeops [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Reconfiguring VM instance instance-00000042 to attach disk [datastore2] 250ef7e7-266b-451d-8627-9cce211d4e83/250ef7e7-266b-451d-8627-9cce211d4e83.vmdk or device None with type sparse {{(pid=62096) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 857.093829] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e316e67b-e2b9-452f-83f4-a71c68ac1026 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.111817] env[62096]: DEBUG oslo_vmware.api [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Waiting for the task: (returnval){ [ 857.111817] env[62096]: value = "task-397408" [ 857.111817] env[62096]: _type = "Task" [ 857.111817] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.120841] env[62096]: DEBUG oslo_vmware.api [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Task: {'id': task-397408, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.232117] env[62096]: DEBUG oslo_vmware.api [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]521bb784-7901-e325-889d-529f1dd7d338, 'name': SearchDatastore_Task, 'duration_secs': 0.007618} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.233060] env[62096]: DEBUG nova.scheduler.client.report [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Updated inventory for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with generation 92 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:957}} [ 857.233295] env[62096]: DEBUG nova.compute.provider_tree [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Updating resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 generation from 92 to 93 during operation: update_inventory {{(pid=62096) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 857.233471] env[62096]: DEBUG nova.compute.provider_tree [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 857.235946] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg 8861008e528948c697962f762f524013 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 857.237586] env[62096]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ab6e4ae-3d0c-4b4e-9493-fbbecd95450c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.242524] env[62096]: DEBUG oslo_vmware.api [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Waiting for the task: (returnval){ [ 857.242524] env[62096]: value = "session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]520f1a23-7d46-8791-dbfa-9be5d2650584" [ 857.242524] env[62096]: _type = "Task" [ 857.242524] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.249942] env[62096]: DEBUG oslo_vmware.api [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]520f1a23-7d46-8791-dbfa-9be5d2650584, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.250475] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8861008e528948c697962f762f524013 [ 857.356618] env[62096]: DEBUG nova.network.neutron [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.357150] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 86d3009c6bf44d36aad66ef8d80db1a8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 857.384173] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 86d3009c6bf44d36aad66ef8d80db1a8 [ 857.517995] env[62096]: DEBUG nova.network.neutron [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.517995] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg b3a2a04993db463cb424d5577cef4190 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 857.524974] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b3a2a04993db463cb424d5577cef4190 [ 857.621486] env[62096]: DEBUG oslo_vmware.api [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Task: {'id': task-397408, 'name': ReconfigVM_Task, 'duration_secs': 0.261626} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.622026] env[62096]: DEBUG nova.virt.vmwareapi.volumeops [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Reconfigured VM instance instance-00000042 to attach disk [datastore2] 250ef7e7-266b-451d-8627-9cce211d4e83/250ef7e7-266b-451d-8627-9cce211d4e83.vmdk or device None with type sparse {{(pid=62096) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 857.623135] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b1e66b4c-a5b3-4bd6-a38e-b2036d9d2cbc {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.629993] env[62096]: DEBUG oslo_vmware.api [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Waiting for the task: (returnval){ [ 857.629993] env[62096]: value = "task-397409" [ 857.629993] env[62096]: _type = "Task" [ 857.629993] env[62096]: } to complete. 
{{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.637916] env[62096]: DEBUG oslo_vmware.api [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Task: {'id': task-397409, 'name': Rename_Task} progress is 5%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.738662] env[62096]: DEBUG oslo_concurrency.lockutils [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.859s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 857.739506] env[62096]: ERROR nova.compute.manager [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 24bb1713-462a-4988-824c-abb2ceb0205b, please check neutron logs for more information. [ 857.739506] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] Traceback (most recent call last): [ 857.739506] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 857.739506] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] self.driver.spawn(context, instance, image_meta, [ 857.739506] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 857.739506] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 857.739506] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 857.739506] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] vm_ref = self.build_virtual_machine(instance, [ 857.739506] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 857.739506] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] vif_infos = vmwarevif.get_vif_info(self._session, [ 857.739506] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 857.739890] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] for vif in network_info: [ 857.739890] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 857.739890] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] return self._sync_wrapper(fn, *args, **kwargs) [ 857.739890] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] File 
"/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 857.739890] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] self.wait() [ 857.739890] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 857.739890] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] self[:] = self._gt.wait() [ 857.739890] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 857.739890] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] return self._exit_event.wait() [ 857.739890] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 857.739890] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] current.throw(*self._exc) [ 857.739890] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 857.739890] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] result = function(*args, **kwargs) [ 857.740376] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 857.740376] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] return func(*args, **kwargs) [ 857.740376] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 857.740376] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] raise e [ 857.740376] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 857.740376] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] nwinfo = self.network_api.allocate_for_instance( [ 857.740376] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 857.740376] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] created_port_ids = self._update_ports_for_instance( [ 857.740376] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 857.740376] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] with excutils.save_and_reraise_exception(): [ 857.740376] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 857.740376] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] self.force_reraise() [ 857.740376] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 857.740762] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] raise self.value [ 857.740762] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 857.740762] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] updated_port = self._update_port( [ 857.740762] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 857.740762] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] _ensure_no_port_binding_failure(port) [ 857.740762] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 857.740762] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] raise exception.PortBindingFailed(port_id=port['id']) [ 857.740762] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] nova.exception.PortBindingFailed: Binding failed for port 24bb1713-462a-4988-824c-abb2ceb0205b, please check neutron logs for more information. [ 857.740762] env[62096]: ERROR nova.compute.manager [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] [ 857.741037] env[62096]: DEBUG nova.compute.utils [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] Binding failed for port 24bb1713-462a-4988-824c-abb2ceb0205b, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 857.742178] env[62096]: DEBUG oslo_concurrency.lockutils [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.491s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 857.743697] env[62096]: INFO nova.compute.claims [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 857.745666] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Expecting reply to msg fbd9145a165345f08a4c11e0a8af9652 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 857.746965] env[62096]: DEBUG nova.compute.manager [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] Build of instance 4057df30-37e2-415d-b6d2-e4211b95863d was re-scheduled: Binding failed for port 24bb1713-462a-4988-824c-abb2ceb0205b, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 857.747405] env[62096]: DEBUG nova.compute.manager [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 857.747631] env[62096]: DEBUG oslo_concurrency.lockutils [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Acquiring lock "refresh_cache-4057df30-37e2-415d-b6d2-e4211b95863d" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 857.747771] env[62096]: DEBUG oslo_concurrency.lockutils [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Acquired lock "refresh_cache-4057df30-37e2-415d-b6d2-e4211b95863d" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.747927] env[62096]: DEBUG nova.network.neutron [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 857.748316] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg 27269c6df88f430690bb1a514c252e54 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 857.758172] env[62096]: DEBUG oslo_vmware.api [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]520f1a23-7d46-8791-dbfa-9be5d2650584, 'name': SearchDatastore_Task, 'duration_secs': 0.00906} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.758636] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 27269c6df88f430690bb1a514c252e54 [ 857.759029] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Releasing lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 857.759270] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk to [datastore2] a7838568-9d47-4306-8bb6-9ad74ab1feb3/a7838568-9d47-4306-8bb6-9ad74ab1feb3.vmdk {{(pid=62096) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 857.759512] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ea2bb943-2794-4ab4-a401-d5a8f81bc767 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.765343] env[62096]: DEBUG oslo_vmware.api [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Waiting for the task: (returnval){ [ 857.765343] env[62096]: value = "task-397410" [ 857.765343] env[62096]: _type = "Task" [ 857.765343] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.772848] env[62096]: DEBUG oslo_vmware.api [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Task: {'id': task-397410, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.787208] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fbd9145a165345f08a4c11e0a8af9652 [ 857.859061] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Releasing lock "refresh_cache-250ef7e7-266b-451d-8627-9cce211d4e83" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 857.859311] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Updated the network info_cache for instance {{(pid=62096) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 857.859562] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 857.859731] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 857.859878] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 857.860088] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 857.860328] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 857.860497] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 857.860640] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62096) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10545}} [ 857.860790] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 857.861166] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 59ebab55151a4782aa4c6af9d6db50cb in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 857.870730] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 59ebab55151a4782aa4c6af9d6db50cb [ 858.018925] env[62096]: INFO nova.compute.manager [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: d5a6c627-bbab-49d4-a3bd-cb5b15264b18] Took 1.02 seconds to deallocate network for instance. [ 858.020893] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg 8bdd03e15ca24ab4b550905e0f01b9ae in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 858.054219] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8bdd03e15ca24ab4b550905e0f01b9ae [ 858.142463] env[62096]: DEBUG oslo_vmware.api [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Task: {'id': task-397409, 'name': Rename_Task, 'duration_secs': 0.127248} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.142818] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Powering on the VM {{(pid=62096) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 858.143077] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ec575fa9-1f3a-411f-8055-ccffd3f3b7e5 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.149864] env[62096]: DEBUG oslo_vmware.api [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Waiting for the task: (returnval){ [ 858.149864] env[62096]: value = "task-397411" [ 858.149864] env[62096]: _type = "Task" [ 858.149864] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.158066] env[62096]: DEBUG oslo_vmware.api [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Task: {'id': task-397411, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.251947] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Expecting reply to msg a2c6526e12834f47a09444f92e519c9c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 858.259976] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a2c6526e12834f47a09444f92e519c9c [ 858.270127] env[62096]: DEBUG nova.network.neutron [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 858.278352] env[62096]: DEBUG oslo_vmware.api [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Task: {'id': task-397410, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.498005} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.278603] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk to [datastore2] a7838568-9d47-4306-8bb6-9ad74ab1feb3/a7838568-9d47-4306-8bb6-9ad74ab1feb3.vmdk {{(pid=62096) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 858.278808] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Extending root virtual disk to 1048576 {{(pid=62096) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 858.279077] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-815f399b-9a32-4962-8d65-fb2808adfb97 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.284845] env[62096]: DEBUG oslo_vmware.api [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Waiting for the task: (returnval){ [ 858.284845] env[62096]: value = "task-397412" [ 858.284845] env[62096]: _type = "Task" [ 858.284845] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.292684] env[62096]: DEBUG oslo_vmware.api [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Task: {'id': task-397412, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.346244] env[62096]: DEBUG nova.network.neutron [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.346874] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg d71531607b8444d0a88cc74614668990 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 858.355490] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d71531607b8444d0a88cc74614668990 [ 858.364774] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 858.526137] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg a8b94d467d7e45d9ab2271764871b3d6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 858.558108] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a8b94d467d7e45d9ab2271764871b3d6 [ 858.659481] env[62096]: DEBUG oslo_vmware.api [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Task: {'id': task-397411, 'name': PowerOnVM_Task, 'duration_secs': 0.469085} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.659729] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Powered on the VM {{(pid=62096) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 858.659973] env[62096]: DEBUG nova.compute.manager [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Checking state {{(pid=62096) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 858.660745] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0663d0a9-cdd3-48e7-bad4-21a3ed73d993 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.667606] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Expecting reply to msg 4fdeb62b084547e19a46f915f25a265c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 858.695719] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4fdeb62b084547e19a46f915f25a265c [ 858.793461] env[62096]: DEBUG oslo_vmware.api [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Task: {'id': task-397412, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.124156} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.795713] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Extended root virtual disk {{(pid=62096) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 858.796644] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d3cc8c4-72f5-4119-a59e-343d924097bf {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.815576] env[62096]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Reconfiguring VM instance instance-00000045 to attach disk [datastore2] a7838568-9d47-4306-8bb6-9ad74ab1feb3/a7838568-9d47-4306-8bb6-9ad74ab1feb3.vmdk or device None with type sparse {{(pid=62096) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 858.817957] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4d773a9e-538d-4fea-9c1b-6278fad45224 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.836395] env[62096]: DEBUG oslo_vmware.api [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Waiting for the task: (returnval){ [ 858.836395] env[62096]: value = "task-397413" [ 858.836395] env[62096]: _type = "Task" [ 858.836395] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.845102] env[62096]: DEBUG oslo_vmware.api [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Task: {'id': task-397413, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.849619] env[62096]: DEBUG oslo_concurrency.lockutils [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Releasing lock "refresh_cache-4057df30-37e2-415d-b6d2-e4211b95863d" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 858.849833] env[62096]: DEBUG nova.compute.manager [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 858.850045] env[62096]: DEBUG nova.compute.manager [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 858.850233] env[62096]: DEBUG nova.network.neutron [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 858.868516] env[62096]: DEBUG nova.network.neutron [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 858.869103] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg 3e87cd0336e749b0ba48a84034ed57b2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 858.875997] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e87cd0336e749b0ba48a84034ed57b2 [ 858.969756] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73a504c0-aba5-466f-99a6-491b1c12003b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.976929] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-600b3a2e-9e53-4346-842a-32804b268ebb {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.008878] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54984286-3df8-48ef-9cde-0ad5949a2e0e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.017749] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fa660e3-5301-4e3d-aabb-7026dce375a3 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.035315] env[62096]: DEBUG nova.compute.provider_tree [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 859.036052] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Expecting reply to msg f62a4dff0c6546a28f11766d858be49a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 859.043741] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f62a4dff0c6546a28f11766d858be49a [ 859.054435] 
env[62096]: INFO nova.scheduler.client.report [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Deleted allocations for instance d5a6c627-bbab-49d4-a3bd-cb5b15264b18 [ 859.059542] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg 25990436f73246d7a5e13c9bae7ab37e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 859.073929] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 25990436f73246d7a5e13c9bae7ab37e [ 859.178633] env[62096]: DEBUG oslo_concurrency.lockutils [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 859.346239] env[62096]: DEBUG oslo_vmware.api [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Task: {'id': task-397413, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.371917] env[62096]: DEBUG nova.network.neutron [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 859.372512] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg fa0dd98bff2c4bb2b6ba7ecb2b6b7ee7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 859.381745] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fa0dd98bff2c4bb2b6ba7ecb2b6b7ee7 [ 859.535309] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Expecting reply to msg 15015b40387443079f673444d2523633 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 859.538750] env[62096]: DEBUG nova.scheduler.client.report [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 859.541241] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 
tempest-ServersTestJSON-242390470-project-member] Expecting reply to msg 86ca94c40a88462b9591f78e03dc9da1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 859.544048] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 15015b40387443079f673444d2523633 [ 859.560174] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 86ca94c40a88462b9591f78e03dc9da1 [ 859.560730] env[62096]: DEBUG oslo_concurrency.lockutils [None req-31b37f0f-ed04-4f12-864d-f77713fa80b1 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Lock "d5a6c627-bbab-49d4-a3bd-cb5b15264b18" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 140.235s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 859.561277] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 57929be6937145c689a48c753c7fdfb8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 859.569560] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 57929be6937145c689a48c753c7fdfb8 [ 859.847729] env[62096]: DEBUG oslo_vmware.api [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Task: {'id': task-397413, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.874540] env[62096]: INFO nova.compute.manager [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 4057df30-37e2-415d-b6d2-e4211b95863d] Took 1.02 seconds to deallocate network for instance. 
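Aside: the ERROR traceback earlier in this section ends with nova.exception.PortBindingFailed raised from _ensure_no_port_binding_failure after Neutron reported that binding failed for port 24bb1713-462a-4988-824c-abb2ceb0205b. As a minimal, self-contained sketch of that style of check (illustrative only, not the Nova source; the exception class and helper names below are stand-ins), the failure is detected by inspecting the 'binding:vif_type' field of the port dict returned by Neutron:

    # Illustrative sketch only: stand-in names, not nova.exception or
    # nova.network.neutron.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        """Raised when Neutron could not bind a port to a host."""
        def __init__(self, port_id):
            super().__init__(
                'Binding failed for port %s, please check neutron logs '
                'for more information.' % port_id)

    def ensure_no_port_binding_failure(port):
        # A port whose binding failed comes back from Neutron with
        # binding:vif_type set to 'binding_failed'.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    try:
        ensure_no_port_binding_failure(
            {'id': '24bb1713-462a-4988-824c-abb2ceb0205b',
             'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)  # same message format as in the traceback above

When this exception propagates out of _build_and_run_instance, the compute manager aborts the resource claim and re-schedules the build, which is what the "was re-scheduled", "Unplugging VIFs for instance", and "Took 1.02 seconds to deallocate network for instance" entries for instance 4057df30-37e2-415d-b6d2-e4211b95863d in this section show.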
[ 859.876547] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg 0f82da34c7f54b3cbdc73b0a88d5d92f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 859.915084] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0f82da34c7f54b3cbdc73b0a88d5d92f [ 860.038025] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Acquiring lock "250ef7e7-266b-451d-8627-9cce211d4e83" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 860.038474] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Lock "250ef7e7-266b-451d-8627-9cce211d4e83" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 860.038569] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Acquiring lock "250ef7e7-266b-451d-8627-9cce211d4e83-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 860.038679] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Lock "250ef7e7-266b-451d-8627-9cce211d4e83-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 860.038844] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Lock "250ef7e7-266b-451d-8627-9cce211d4e83-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 860.043293] env[62096]: INFO nova.compute.manager [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Terminating instance [ 860.045268] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Acquiring lock "refresh_cache-250ef7e7-266b-451d-8627-9cce211d4e83" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 860.045425] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] 
Acquired lock "refresh_cache-250ef7e7-266b-451d-8627-9cce211d4e83" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 860.045587] env[62096]: DEBUG nova.network.neutron [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 860.045983] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Expecting reply to msg 920a406ea50b490ea4566955999c242d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 860.047570] env[62096]: DEBUG oslo_concurrency.lockutils [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.305s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 860.048063] env[62096]: DEBUG nova.compute.manager [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] Start building networks asynchronously for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 860.049789] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Expecting reply to msg be21737e366f41e68ca4877ff7bbb763 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 860.058709] env[62096]: DEBUG oslo_concurrency.lockutils [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.201s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 860.058709] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 0e601936056f4b15912e9329a7fcc955 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 860.059759] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 920a406ea50b490ea4566955999c242d [ 860.063584] env[62096]: DEBUG nova.compute.manager [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] Starting instance... 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 860.065177] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg d45dc7457a6747f1b12feaf5ddae2598 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 860.105175] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0e601936056f4b15912e9329a7fcc955 [ 860.108505] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg be21737e366f41e68ca4877ff7bbb763 [ 860.115546] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d45dc7457a6747f1b12feaf5ddae2598 [ 860.347604] env[62096]: DEBUG oslo_vmware.api [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Task: {'id': task-397413, 'name': ReconfigVM_Task, 'duration_secs': 1.197238} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.347881] env[62096]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Reconfigured VM instance instance-00000045 to attach disk [datastore2] a7838568-9d47-4306-8bb6-9ad74ab1feb3/a7838568-9d47-4306-8bb6-9ad74ab1feb3.vmdk or device None with type sparse {{(pid=62096) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 860.348529] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-edfbf0c0-48ab-4140-a7df-beda93154e9e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.355293] env[62096]: DEBUG oslo_vmware.api [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Waiting for the task: (returnval){ [ 860.355293] env[62096]: value = "task-397414" [ 860.355293] env[62096]: _type = "Task" [ 860.355293] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.362877] env[62096]: DEBUG oslo_vmware.api [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Task: {'id': task-397414, 'name': Rename_Task} progress is 5%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.381864] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg 33c4049fa9ad45fd9ab482fed5f44d75 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 860.414427] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 33c4049fa9ad45fd9ab482fed5f44d75 [ 860.553778] env[62096]: DEBUG nova.compute.utils [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 860.558208] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Expecting reply to msg 0cb9b5cbba1344deb6d1e2bc27e4fd9d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 860.563037] env[62096]: DEBUG nova.compute.manager [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 860.563037] env[62096]: DEBUG nova.network.neutron [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 860.573346] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0cb9b5cbba1344deb6d1e2bc27e4fd9d [ 860.575361] env[62096]: DEBUG nova.network.neutron [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 860.584211] env[62096]: DEBUG oslo_concurrency.lockutils [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 860.638982] env[62096]: DEBUG nova.network.neutron [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 860.639514] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Expecting reply to msg 225e091c6e7e40fea7b2023e37253628 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 860.648322] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 225e091c6e7e40fea7b2023e37253628 [ 860.697110] env[62096]: DEBUG nova.policy [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ae5f4544952a4f15a140099804434793', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '53bec8942a4744f2899e2dbb69558444', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 860.789267] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37c385f3-e68c-484f-b529-7315535b38e0 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.796645] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40ba8ce8-de0e-4266-9e9c-ab6e78493c38 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.827614] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94f5e4a6-b8af-448f-950d-dff2fa2c55eb {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.835169] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c72a56d-8baf-4118-8fd4-1d6f0aeb190f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.850257] env[62096]: DEBUG nova.compute.provider_tree [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 860.850771] env[62096]: INFO 
oslo_messaging._drivers.amqpdriver [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg aa181d6cdb894f6197346e6618792a89 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 860.859367] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aa181d6cdb894f6197346e6618792a89 [ 860.864711] env[62096]: DEBUG oslo_vmware.api [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Task: {'id': task-397414, 'name': Rename_Task, 'duration_secs': 0.123243} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.864971] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Powering on the VM {{(pid=62096) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 860.865217] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a17dccbf-bbeb-473c-9b08-3377b16f2b98 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.871161] env[62096]: DEBUG oslo_vmware.api [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Waiting for the task: (returnval){ [ 860.871161] env[62096]: value = "task-397415" [ 860.871161] env[62096]: _type = "Task" [ 860.871161] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.878965] env[62096]: DEBUG oslo_vmware.api [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Task: {'id': task-397415, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.902752] env[62096]: INFO nova.scheduler.client.report [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Deleted allocations for instance 4057df30-37e2-415d-b6d2-e4211b95863d [ 860.909223] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg 800f766821334b3682fc0fba85a5d6d5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 860.923884] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 800f766821334b3682fc0fba85a5d6d5 [ 861.060633] env[62096]: DEBUG nova.compute.manager [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 861.062650] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Expecting reply to msg c64ee9fb256a43faa50206325b759c6a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 861.109532] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c64ee9fb256a43faa50206325b759c6a [ 861.141640] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Releasing lock "refresh_cache-250ef7e7-266b-451d-8627-9cce211d4e83" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 861.142334] env[62096]: DEBUG nova.compute.manager [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 861.142552] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 861.143628] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b4296a7-0d51-4a5c-9cf8-337f840f2e82 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.153385] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Powering off the VM {{(pid=62096) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 861.153676] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b66a16b8-fd42-45b0-bd8a-ea6018c08c19 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.160738] env[62096]: DEBUG oslo_vmware.api [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Waiting for the task: (returnval){ [ 861.160738] env[62096]: value = "task-397416" [ 861.160738] env[62096]: _type = "Task" [ 861.160738] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.169766] env[62096]: DEBUG oslo_vmware.api [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Task: {'id': task-397416, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.324434] env[62096]: DEBUG nova.network.neutron [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] Successfully created port: b1b64d64-3759-4701-ae6a-64d26a18db32 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 861.353377] env[62096]: DEBUG nova.scheduler.client.report [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 861.356492] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 816c2c2dcf724861866e2adfa0c3c8e3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 861.375802] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 816c2c2dcf724861866e2adfa0c3c8e3 [ 861.383015] env[62096]: DEBUG oslo_vmware.api [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Task: {'id': task-397415, 'name': PowerOnVM_Task} progress is 64%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.411556] env[62096]: DEBUG oslo_concurrency.lockutils [None req-0c343c31-990f-417d-b18d-e0dbd941468a tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Lock "4057df30-37e2-415d-b6d2-e4211b95863d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 141.194s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.412201] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 08a6942e679c4114a4dc4cf586f69c2d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 861.426986] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 08a6942e679c4114a4dc4cf586f69c2d [ 861.568154] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Expecting reply to msg 9499396d94dd4a53a223ae6647b084dc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 861.615357] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9499396d94dd4a53a223ae6647b084dc [ 861.670737] env[62096]: DEBUG oslo_vmware.api [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Task: {'id': task-397416, 'name': PowerOffVM_Task, 'duration_secs': 0.41127} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.671053] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Powered off the VM {{(pid=62096) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 861.671231] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Unregistering the VM {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 861.671479] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1c3699d1-9718-4418-98c1-09deb8427757 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.694483] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Unregistered the VM {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 861.694698] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Deleting contents of the VM from datastore datastore2 {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 861.694878] 
env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Deleting the datastore file [datastore2] 250ef7e7-266b-451d-8627-9cce211d4e83 {{(pid=62096) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 861.695162] env[62096]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-db7dccd2-e55c-45f6-9fd9-22abbbdab4b0 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.701342] env[62096]: DEBUG oslo_vmware.api [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Waiting for the task: (returnval){ [ 861.701342] env[62096]: value = "task-397418" [ 861.701342] env[62096]: _type = "Task" [ 861.701342] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.709096] env[62096]: DEBUG oslo_vmware.api [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Task: {'id': task-397418, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.859745] env[62096]: DEBUG oslo_concurrency.lockutils [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.808s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.860398] env[62096]: ERROR nova.compute.manager [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 38272d5d-1fd8-4295-91b2-3fc9e342614d, please check neutron logs for more information. 
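The traceback that follows walks this failure from ComputeManager._build_and_run_instance through the VMware driver's spawn() into the Neutron port-update path, ending at _ensure_no_port_binding_failure() in nova/network/neutron.py. As a rough, self-contained sketch (illustrative only, not Nova's actual module): Neutron marks a port it could not bind by setting its binding:vif_type to 'binding_failed', and Nova turns that into the PortBindingFailed error quoted above.

    # Illustrative sketch mirroring the check named in the traceback below;
    # the 'binding:vif_type' field is how Neutron reports the binding result
    # on the port dict it returns.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__("Binding failed for port %s, please check neutron "
                             "logs for more information." % port_id)

    def ensure_no_port_binding_failure(port):
        # Raise if Neutron reported that it could not bind the port anywhere.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    # A port Neutron failed to bind, as in the log entry above:
    try:
        ensure_no_port_binding_failure(
            {'id': '38272d5d-1fd8-4295-91b2-3fc9e342614d',
             'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)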
[ 861.860398] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] Traceback (most recent call last): [ 861.860398] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 861.860398] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] self.driver.spawn(context, instance, image_meta, [ 861.860398] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 861.860398] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] self._vmops.spawn(context, instance, image_meta, injected_files, [ 861.860398] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 861.860398] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] vm_ref = self.build_virtual_machine(instance, [ 861.860398] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 861.860398] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] vif_infos = vmwarevif.get_vif_info(self._session, [ 861.860398] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 861.860729] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] for vif in network_info: [ 861.860729] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 861.860729] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] return self._sync_wrapper(fn, *args, **kwargs) [ 861.860729] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 861.860729] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] self.wait() [ 861.860729] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 861.860729] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] self[:] = self._gt.wait() [ 861.860729] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 861.860729] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] return self._exit_event.wait() [ 861.860729] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 861.860729] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] current.throw(*self._exc) [ 861.860729] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
861.860729] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] result = function(*args, **kwargs) [ 861.861035] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 861.861035] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] return func(*args, **kwargs) [ 861.861035] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 861.861035] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] raise e [ 861.861035] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 861.861035] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] nwinfo = self.network_api.allocate_for_instance( [ 861.861035] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 861.861035] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] created_port_ids = self._update_ports_for_instance( [ 861.861035] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 861.861035] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] with excutils.save_and_reraise_exception(): [ 861.861035] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 861.861035] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] self.force_reraise() [ 861.861035] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 861.861343] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] raise self.value [ 861.861343] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 861.861343] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] updated_port = self._update_port( [ 861.861343] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 861.861343] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] _ensure_no_port_binding_failure(port) [ 861.861343] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 861.861343] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] raise exception.PortBindingFailed(port_id=port['id']) [ 861.861343] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] nova.exception.PortBindingFailed: Binding failed for 
port 38272d5d-1fd8-4295-91b2-3fc9e342614d, please check neutron logs for more information. [ 861.861343] env[62096]: ERROR nova.compute.manager [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] [ 861.861343] env[62096]: DEBUG nova.compute.utils [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] Binding failed for port 38272d5d-1fd8-4295-91b2-3fc9e342614d, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 861.862536] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.358s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.864309] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Expecting reply to msg 226b7527ce324b55b0312c6186a6ead7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 861.872035] env[62096]: DEBUG nova.compute.manager [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] Build of instance 012afef5-91e9-4fc1-af98-c17a3188ad45 was re-scheduled: Binding failed for port 38272d5d-1fd8-4295-91b2-3fc9e342614d, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 861.872035] env[62096]: DEBUG nova.compute.manager [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 861.872035] env[62096]: DEBUG oslo_concurrency.lockutils [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Acquiring lock "refresh_cache-012afef5-91e9-4fc1-af98-c17a3188ad45" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 861.872035] env[62096]: DEBUG oslo_concurrency.lockutils [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Acquired lock "refresh_cache-012afef5-91e9-4fc1-af98-c17a3188ad45" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 861.872370] env[62096]: DEBUG nova.network.neutron [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 861.872370] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg afd03b530e8248cdaac5a3d6afcdde0e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 861.879212] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg afd03b530e8248cdaac5a3d6afcdde0e [ 861.884806] env[62096]: DEBUG oslo_vmware.api [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Task: {'id': task-397415, 'name': PowerOnVM_Task, 'duration_secs': 0.756769} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.885321] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Powered on the VM {{(pid=62096) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 861.885512] env[62096]: INFO nova.compute.manager [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Took 5.84 seconds to spawn the instance on the hypervisor. 
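The ReconfigVM_Task, Rename_Task and PowerOnVM_Task entries above all follow the same shape: oslo.vmware submits the task to vCenter, then polls its state ("progress is 5%", "progress is 64%") until it reports success or error and the duration is logged. A minimal, library-free sketch of that polling loop; FakeTask and the poll interval are illustrative stand-ins, not oslo.vmware's real objects:

    import time

    class FakeTask:
        # Stand-in for a vCenter task handle; real code reads the task's
        # state and progress through the vSphere API.
        def __init__(self, states):
            self._states = iter(states)
        def info(self):
            return next(self._states)

    def wait_for_task(task, poll_interval=0.1):
        # Poll until the task reaches a terminal state, as the _poll_task entries show.
        while True:
            state, progress = task.info()
            if state == 'success':
                return
            if state == 'error':
                raise RuntimeError('task failed')
            print('progress is %d%%' % progress)
            time.sleep(poll_interval)

    wait_for_task(FakeTask([('running', 0), ('running', 64), ('success', 100)]))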
[ 861.885748] env[62096]: DEBUG nova.compute.manager [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Checking state {{(pid=62096) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 861.886466] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20bdfdae-caf9-4c3b-b594-c1d2850789e8 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.906652] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Expecting reply to msg 310fe5bf84d641d5bebdf7adf156b462 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 861.910731] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 226b7527ce324b55b0312c6186a6ead7 [ 861.915252] env[62096]: DEBUG nova.compute.manager [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 60bc5e14-f495-48da-8db0-54d75b523822] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 861.916091] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 4c64e859af824abc97bf8de936149a29 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 861.944190] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 310fe5bf84d641d5bebdf7adf156b462 [ 861.964161] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4c64e859af824abc97bf8de936149a29 [ 862.071941] env[62096]: DEBUG nova.compute.manager [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 862.096903] env[62096]: DEBUG nova.virt.hardware [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 862.097156] env[62096]: DEBUG nova.virt.hardware [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 862.097407] env[62096]: DEBUG nova.virt.hardware [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 862.097515] env[62096]: DEBUG nova.virt.hardware [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 862.097599] env[62096]: DEBUG nova.virt.hardware [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 862.097767] env[62096]: DEBUG nova.virt.hardware [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 862.097932] env[62096]: DEBUG nova.virt.hardware [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 862.098079] env[62096]: DEBUG nova.virt.hardware [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 862.098248] env[62096]: DEBUG nova.virt.hardware [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] 
Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 862.098405] env[62096]: DEBUG nova.virt.hardware [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 862.098569] env[62096]: DEBUG nova.virt.hardware [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 862.099745] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2fe9a38-3885-404b-90cb-f9da22a94b98 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.108536] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c03b182e-f423-4283-9cdd-b8cd70c8285a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.210953] env[62096]: DEBUG oslo_vmware.api [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Task: {'id': task-397418, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.28934} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.211212] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Deleted the datastore file {{(pid=62096) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 862.211398] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Deleted contents of the VM from datastore datastore2 {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 862.211575] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 862.212230] env[62096]: INFO nova.compute.manager [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Took 1.07 seconds to destroy the instance on the hypervisor. [ 862.212501] env[62096]: DEBUG oslo.service.loopingcall [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 862.213887] env[62096]: DEBUG nova.compute.manager [-] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 862.213999] env[62096]: DEBUG nova.network.neutron [-] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 862.219321] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Acquiring lock "1c02c726-ab23-49e5-8d55-b222c712225a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 862.219321] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Lock "1c02c726-ab23-49e5-8d55-b222c712225a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 862.234979] env[62096]: DEBUG nova.network.neutron [-] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 862.235560] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg be19649b6cb84788af1fa08f58f29779 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 862.241916] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg be19649b6cb84788af1fa08f58f29779 [ 862.397620] env[62096]: DEBUG nova.network.neutron [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 862.422105] env[62096]: INFO nova.compute.manager [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Took 26.92 seconds to build instance. 
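The recurring Acquiring lock / acquired ... waited Ns / "released" ... held Ns lines in this section come from oslo.concurrency's lockutils wrapper, which Nova uses to serialize work per instance UUID (e.g. "1c02c726-..." just above) and around shared state such as "compute_resources". A minimal sketch of the same pattern, assuming only that the oslo.concurrency package is installed; the lock name and function here are illustrative:

    from oslo_concurrency import lockutils

    # The decorator emits DEBUG messages of the same shape as those seen above
    # (Acquiring lock ... / acquired ... waited Ns / "released" ... held Ns)
    # and serializes concurrent callers on the named in-process lock.
    @lockutils.synchronized('compute_resources')
    def claim_resources(instance_uuid):
        # ... mutate shared resource-tracker state here ...
        return instance_uuid

    claim_resources('1c02c726-ab23-49e5-8d55-b222c712225a')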
[ 862.422715] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Expecting reply to msg 453922843aa7486caf3b37f469c740da in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 862.441526] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 453922843aa7486caf3b37f469c740da [ 862.457879] env[62096]: DEBUG oslo_concurrency.lockutils [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 862.464966] env[62096]: DEBUG nova.compute.manager [req-55597ba8-6716-411b-ab2e-3f2c68a71088 req-07e390a9-2282-4449-8c76-ea5ec3b73b5d service nova] [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] Received event network-changed-b1b64d64-3759-4701-ae6a-64d26a18db32 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 862.465340] env[62096]: DEBUG nova.compute.manager [req-55597ba8-6716-411b-ab2e-3f2c68a71088 req-07e390a9-2282-4449-8c76-ea5ec3b73b5d service nova] [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] Refreshing instance network info cache due to event network-changed-b1b64d64-3759-4701-ae6a-64d26a18db32. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 862.466014] env[62096]: DEBUG oslo_concurrency.lockutils [req-55597ba8-6716-411b-ab2e-3f2c68a71088 req-07e390a9-2282-4449-8c76-ea5ec3b73b5d service nova] Acquiring lock "refresh_cache-5c04dded-d1c9-44fe-bf2f-de295d21b725" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 862.466263] env[62096]: DEBUG oslo_concurrency.lockutils [req-55597ba8-6716-411b-ab2e-3f2c68a71088 req-07e390a9-2282-4449-8c76-ea5ec3b73b5d service nova] Acquired lock "refresh_cache-5c04dded-d1c9-44fe-bf2f-de295d21b725" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 862.466701] env[62096]: DEBUG nova.network.neutron [req-55597ba8-6716-411b-ab2e-3f2c68a71088 req-07e390a9-2282-4449-8c76-ea5ec3b73b5d service nova] [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] Refreshing network info cache for port b1b64d64-3759-4701-ae6a-64d26a18db32 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 862.467200] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-55597ba8-6716-411b-ab2e-3f2c68a71088 req-07e390a9-2282-4449-8c76-ea5ec3b73b5d service nova] Expecting reply to msg 45572566ddae430ab087d8fd51510729 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 862.475567] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 45572566ddae430ab087d8fd51510729 [ 862.548102] env[62096]: DEBUG nova.network.neutron [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 862.548776] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 
tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 292dbc95ba33463bb1ec2962c5a4f96f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 862.557225] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 292dbc95ba33463bb1ec2962c5a4f96f [ 862.600304] env[62096]: ERROR nova.compute.manager [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b1b64d64-3759-4701-ae6a-64d26a18db32, please check neutron logs for more information. [ 862.600304] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 862.600304] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 862.600304] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 862.600304] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 862.600304] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 862.600304] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 862.600304] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 862.600304] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 862.600304] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 862.600304] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 862.600304] env[62096]: ERROR nova.compute.manager raise self.value [ 862.600304] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 862.600304] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 862.600304] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 862.600304] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 862.600935] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 862.600935] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 862.600935] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b1b64d64-3759-4701-ae6a-64d26a18db32, please check neutron logs for more information. 
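The failure is recorded twice around this point: once from inside the eventlet green thread that runs _allocate_network_async (the raw traceback that follows, entered via eventlet/hubs/poll.py), and again when the spawn path iterates the asynchronous network_info and the wrapper's wait() re-raises the stored exception. A tiny sketch of that hand-off, assuming only that the eventlet package is installed; the function and message are illustrative:

    import eventlet

    def allocate_network():
        # Simulates allocate_for_instance() failing inside the background green thread.
        raise RuntimeError('Binding failed for port <id>')

    gt = eventlet.spawn(allocate_network)   # allocation starts in the background
    try:
        # The consumer blocks here; the exception stored by the green thread
        # is re-raised to the caller, just as the second traceback shows.
        gt.wait()
    except RuntimeError as exc:
        print('surfaced to the caller:', exc)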
[ 862.600935] env[62096]: ERROR nova.compute.manager [ 862.601361] env[62096]: Traceback (most recent call last): [ 862.601482] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 862.601482] env[62096]: listener.cb(fileno) [ 862.601613] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 862.601613] env[62096]: result = function(*args, **kwargs) [ 862.601691] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 862.601691] env[62096]: return func(*args, **kwargs) [ 862.602666] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 862.602666] env[62096]: raise e [ 862.602750] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 862.602750] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 862.602829] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 862.602829] env[62096]: created_port_ids = self._update_ports_for_instance( [ 862.602910] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 862.602910] env[62096]: with excutils.save_and_reraise_exception(): [ 862.602986] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 862.602986] env[62096]: self.force_reraise() [ 862.603060] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 862.603060] env[62096]: raise self.value [ 862.603131] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 862.603131] env[62096]: updated_port = self._update_port( [ 862.603203] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 862.603203] env[62096]: _ensure_no_port_binding_failure(port) [ 862.603284] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 862.603284] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 862.603376] env[62096]: nova.exception.PortBindingFailed: Binding failed for port b1b64d64-3759-4701-ae6a-64d26a18db32, please check neutron logs for more information. [ 862.603432] env[62096]: Removing descriptor: 16 [ 862.604505] env[62096]: ERROR nova.compute.manager [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b1b64d64-3759-4701-ae6a-64d26a18db32, please check neutron logs for more information. 
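The per-instance traceback that follows is the same PortBindingFailed surfacing inside _build_resources / _build_and_run_instance. As the earlier entries for instance 012afef5 show, Nova treats this as retryable rather than fatal: the local build is aborted, the resource claim is dropped, and the request is handed back for re-scheduling, while the partially built instance is cleaned up (hence the "Terminating instance" entry right after the traceback). A schematic, library-free sketch of that control flow; the helper names are illustrative, not Nova's actual signatures:

    class PortBindingFailed(Exception):
        pass

    def abort_claim():
        print('resource claim dropped')

    def build_and_run_instance(spawn):
        try:
            spawn()                           # driver.spawn() raises PortBindingFailed
        except PortBindingFailed as exc:
            abort_claim()                     # free the claimed CPU/RAM/disk
            return ('RESCHEDULED', str(exc))  # give the request back to the scheduler
        return ('ACTIVE', None)

    def failing_spawn():
        raise PortBindingFailed('Binding failed for port <id>')

    print(build_and_run_instance(failing_spawn))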
[ 862.604505] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] Traceback (most recent call last): [ 862.604505] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 862.604505] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] yield resources [ 862.604505] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 862.604505] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] self.driver.spawn(context, instance, image_meta, [ 862.604505] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 862.604505] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] self._vmops.spawn(context, instance, image_meta, injected_files, [ 862.604505] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 862.604505] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] vm_ref = self.build_virtual_machine(instance, [ 862.604505] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 862.604829] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] vif_infos = vmwarevif.get_vif_info(self._session, [ 862.604829] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 862.604829] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] for vif in network_info: [ 862.604829] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 862.604829] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] return self._sync_wrapper(fn, *args, **kwargs) [ 862.604829] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 862.604829] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] self.wait() [ 862.604829] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 862.604829] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] self[:] = self._gt.wait() [ 862.604829] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 862.604829] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] return self._exit_event.wait() [ 862.604829] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 862.604829] env[62096]: ERROR 
nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] result = hub.switch() [ 862.605210] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 862.605210] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] return self.greenlet.switch() [ 862.605210] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 862.605210] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] result = function(*args, **kwargs) [ 862.605210] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 862.605210] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] return func(*args, **kwargs) [ 862.605210] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 862.605210] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] raise e [ 862.605210] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 862.605210] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] nwinfo = self.network_api.allocate_for_instance( [ 862.605210] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 862.605210] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] created_port_ids = self._update_ports_for_instance( [ 862.605210] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 862.605532] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] with excutils.save_and_reraise_exception(): [ 862.605532] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 862.605532] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] self.force_reraise() [ 862.605532] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 862.605532] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] raise self.value [ 862.605532] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 862.605532] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] updated_port = self._update_port( [ 862.605532] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 862.605532] 
env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] _ensure_no_port_binding_failure(port) [ 862.605532] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 862.605532] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] raise exception.PortBindingFailed(port_id=port['id']) [ 862.605532] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] nova.exception.PortBindingFailed: Binding failed for port b1b64d64-3759-4701-ae6a-64d26a18db32, please check neutron logs for more information. [ 862.605532] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] [ 862.606648] env[62096]: INFO nova.compute.manager [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] Terminating instance [ 862.611259] env[62096]: DEBUG oslo_concurrency.lockutils [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Acquiring lock "refresh_cache-5c04dded-d1c9-44fe-bf2f-de295d21b725" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 862.633620] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfd12541-8574-4567-8390-3245135cd88f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.642177] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-903c8d05-d30c-402f-bb0d-b1516b49cc09 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.676778] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89490a8f-5c50-4584-b225-0e1059c43af9 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.684061] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6ad9795-2457-4132-95e3-6b41e17257e8 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.699322] env[62096]: DEBUG nova.compute.provider_tree [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 862.700053] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Expecting reply to msg e167ce21ee6c40d49996bca32c5badae in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 862.707704] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e167ce21ee6c40d49996bca32c5badae [ 862.737983] env[62096]: DEBUG nova.network.neutron [-] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Updating instance_info_cache with network_info: [] {{(pid=62096) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 862.738427] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 975019dc1bcd4b5f82df96aaca2684e4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 862.746437] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 975019dc1bcd4b5f82df96aaca2684e4 [ 862.926302] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c8a2d481-d63a-4d02-82f0-f3787fb45049 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Lock "a7838568-9d47-4306-8bb6-9ad74ab1feb3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 108.132s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 862.926302] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Expecting reply to msg 4aaa978a50c8427eb8e35521a6aad4a9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 862.935182] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4aaa978a50c8427eb8e35521a6aad4a9 [ 862.992753] env[62096]: DEBUG nova.network.neutron [req-55597ba8-6716-411b-ab2e-3f2c68a71088 req-07e390a9-2282-4449-8c76-ea5ec3b73b5d service nova] [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 863.031049] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Expecting reply to msg 52a7b616c13a444e8af7f500a1158133 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 863.040160] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 52a7b616c13a444e8af7f500a1158133 [ 863.051294] env[62096]: DEBUG oslo_concurrency.lockutils [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Releasing lock "refresh_cache-012afef5-91e9-4fc1-af98-c17a3188ad45" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 863.051508] env[62096]: DEBUG nova.compute.manager [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 863.051688] env[62096]: DEBUG nova.compute.manager [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 863.051850] env[62096]: DEBUG nova.network.neutron [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 863.066965] env[62096]: DEBUG nova.network.neutron [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 863.067487] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg e829648edd4f44489ba02e78f685f740 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 863.073528] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e829648edd4f44489ba02e78f685f740 [ 863.131431] env[62096]: DEBUG nova.network.neutron [req-55597ba8-6716-411b-ab2e-3f2c68a71088 req-07e390a9-2282-4449-8c76-ea5ec3b73b5d service nova] [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 863.131980] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-55597ba8-6716-411b-ab2e-3f2c68a71088 req-07e390a9-2282-4449-8c76-ea5ec3b73b5d service nova] Expecting reply to msg 6dd6685c91a34697a5311677452a706b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 863.139755] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6dd6685c91a34697a5311677452a706b [ 863.204197] env[62096]: DEBUG nova.scheduler.client.report [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 863.206868] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Expecting reply to msg 6384b2153059483ea4f30cad661e0694 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 863.223931] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
6384b2153059483ea4f30cad661e0694 [ 863.240719] env[62096]: INFO nova.compute.manager [-] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Took 1.03 seconds to deallocate network for instance. [ 863.246827] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Expecting reply to msg 552d84f39e474faaa37948367f16baf5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 863.278261] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 552d84f39e474faaa37948367f16baf5 [ 863.427782] env[62096]: DEBUG nova.compute.manager [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 863.429734] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Expecting reply to msg 90994ec5aea14122866ddb9edc2eedc8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 863.463312] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 90994ec5aea14122866ddb9edc2eedc8 [ 863.533692] env[62096]: INFO nova.compute.manager [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Rebuilding instance [ 863.569118] env[62096]: DEBUG nova.network.neutron [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 863.569620] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 7afdd71e2df140c7985f59805bdbe8b2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 863.574370] env[62096]: DEBUG nova.compute.manager [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Checking state {{(pid=62096) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 863.575187] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50786c2f-69c2-42e3-8fd6-a11af57e057c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.578986] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7afdd71e2df140c7985f59805bdbe8b2 [ 863.583340] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Expecting reply to msg 7832da2871e341fbaa418a7405c1a848 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 863.611214] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC 
response for msg 7832da2871e341fbaa418a7405c1a848 [ 863.634048] env[62096]: DEBUG oslo_concurrency.lockutils [req-55597ba8-6716-411b-ab2e-3f2c68a71088 req-07e390a9-2282-4449-8c76-ea5ec3b73b5d service nova] Releasing lock "refresh_cache-5c04dded-d1c9-44fe-bf2f-de295d21b725" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 863.634438] env[62096]: DEBUG oslo_concurrency.lockutils [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Acquired lock "refresh_cache-5c04dded-d1c9-44fe-bf2f-de295d21b725" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.634612] env[62096]: DEBUG nova.network.neutron [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 863.635013] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Expecting reply to msg 93273eb97d2b426fa9ea128b04fef318 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 863.641097] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 93273eb97d2b426fa9ea128b04fef318 [ 863.715202] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.853s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.715809] env[62096]: ERROR nova.compute.manager [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b6fb4595-6c02-4377-916c-a91aaa951bfb, please check neutron logs for more information. 
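The Acquiring/acquired/released records above (the per-instance "refresh_cache-<uuid>" locks and the "compute_resources" lock held 1.853s by abort_instance_claim) are emitted by oslo.concurrency's lockutils wrappers, which log wait and hold times. A minimal usage sketch, with illustrative functions rather than Nova's real resource tracker code:

    # Minimal sketch of the locking pattern behind the "Acquiring lock ..." /
    # "Lock ... acquired ... waited" / "Lock ... released ... held" records.
    # The function names below are illustrative, not Nova code.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources(instance_uuid):
        # Runs with the named lock held; the decorator's wrapper logs how long
        # the caller waited for the lock and how long it was held.
        return 'claimed %s' % instance_uuid

    def refresh_cache(instance_uuid):
        # Context-manager form, matching the per-instance refresh_cache locks.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            return 'cache refreshed for %s' % instance_uuid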
[ 863.715809] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] Traceback (most recent call last): [ 863.715809] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 863.715809] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] self.driver.spawn(context, instance, image_meta, [ 863.715809] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 863.715809] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] self._vmops.spawn(context, instance, image_meta, injected_files, [ 863.715809] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 863.715809] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] vm_ref = self.build_virtual_machine(instance, [ 863.715809] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 863.715809] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] vif_infos = vmwarevif.get_vif_info(self._session, [ 863.715809] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 863.716153] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] for vif in network_info: [ 863.716153] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 863.716153] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] return self._sync_wrapper(fn, *args, **kwargs) [ 863.716153] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 863.716153] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] self.wait() [ 863.716153] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 863.716153] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] self[:] = self._gt.wait() [ 863.716153] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 863.716153] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] return self._exit_event.wait() [ 863.716153] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 863.716153] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] current.throw(*self._exc) [ 863.716153] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
863.716153] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] result = function(*args, **kwargs) [ 863.716504] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 863.716504] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] return func(*args, **kwargs) [ 863.716504] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 863.716504] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] raise e [ 863.716504] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 863.716504] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] nwinfo = self.network_api.allocate_for_instance( [ 863.716504] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 863.716504] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] created_port_ids = self._update_ports_for_instance( [ 863.716504] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 863.716504] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] with excutils.save_and_reraise_exception(): [ 863.716504] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 863.716504] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] self.force_reraise() [ 863.716504] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 863.716869] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] raise self.value [ 863.716869] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 863.716869] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] updated_port = self._update_port( [ 863.716869] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 863.716869] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] _ensure_no_port_binding_failure(port) [ 863.716869] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 863.716869] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] raise exception.PortBindingFailed(port_id=port['id']) [ 863.716869] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] nova.exception.PortBindingFailed: Binding failed for 
port b6fb4595-6c02-4377-916c-a91aaa951bfb, please check neutron logs for more information. [ 863.716869] env[62096]: ERROR nova.compute.manager [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] [ 863.716869] env[62096]: DEBUG nova.compute.utils [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] Binding failed for port b6fb4595-6c02-4377-916c-a91aaa951bfb, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 863.717624] env[62096]: DEBUG oslo_concurrency.lockutils [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.485s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 863.719117] env[62096]: INFO nova.compute.claims [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: aa3e06b6-89f8-425f-8950-30deb025a473] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 863.720698] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 99399cd537614a4e8cc48aba751e42d4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 863.721856] env[62096]: DEBUG nova.compute.manager [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] Build of instance fcf84c67-fdea-41d7-aed9-690a45c97eaa was re-scheduled: Binding failed for port b6fb4595-6c02-4377-916c-a91aaa951bfb, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 863.722319] env[62096]: DEBUG nova.compute.manager [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 863.722533] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Acquiring lock "refresh_cache-fcf84c67-fdea-41d7-aed9-690a45c97eaa" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 863.722679] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Acquired lock "refresh_cache-fcf84c67-fdea-41d7-aed9-690a45c97eaa" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.722836] env[62096]: DEBUG nova.network.neutron [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 863.723189] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Expecting reply to msg 898a6acf6fdb45f489cea757b2a8e4f0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 863.728874] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 898a6acf6fdb45f489cea757b2a8e4f0 [ 863.749588] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 863.754243] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 99399cd537614a4e8cc48aba751e42d4 [ 863.896667] env[62096]: DEBUG oslo_concurrency.lockutils [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Acquiring lock "8802af61-3692-4e27-85f4-945afccff231" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 863.896916] env[62096]: DEBUG oslo_concurrency.lockutils [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Lock "8802af61-3692-4e27-85f4-945afccff231" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 863.945699] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 864.073459] env[62096]: INFO nova.compute.manager [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 012afef5-91e9-4fc1-af98-c17a3188ad45] Took 1.02 seconds to deallocate network for instance. [ 864.075186] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 13572ee1cb474bc0a24a7c95d79a2df1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 864.085856] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Powering off the VM {{(pid=62096) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 864.086118] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1d093313-2def-4363-a516-642536320d8d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.093836] env[62096]: DEBUG oslo_vmware.api [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Waiting for the task: (returnval){ [ 864.093836] env[62096]: value = "task-397419" [ 864.093836] env[62096]: _type = "Task" [ 864.093836] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.104157] env[62096]: DEBUG oslo_vmware.api [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Task: {'id': task-397419, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.112848] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 13572ee1cb474bc0a24a7c95d79a2df1 [ 864.150793] env[62096]: DEBUG nova.network.neutron [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 864.226796] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 499b59d52b45480b816ce257c210554c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 864.234733] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 499b59d52b45480b816ce257c210554c [ 864.244528] env[62096]: DEBUG nova.network.neutron [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 864.246920] env[62096]: DEBUG nova.network.neutron [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.247423] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Expecting reply to msg 32952df6b0234149a1bc70f21cf8d35f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 864.256043] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 32952df6b0234149a1bc70f21cf8d35f [ 864.316532] env[62096]: DEBUG nova.network.neutron [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.317243] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Expecting reply to msg 3db67bf120ee4784a6359976347fcd7b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 864.325195] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3db67bf120ee4784a6359976347fcd7b [ 864.512988] env[62096]: DEBUG nova.compute.manager [req-910fed8c-3be3-4d2d-972e-cc6d56d882f1 req-24517650-6c13-48fd-ac84-7654da02f314 service nova] [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] Received event network-vif-deleted-b1b64d64-3759-4701-ae6a-64d26a18db32 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 864.579805] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg f374c66efe1e406396c40822510d28da in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 864.603583] env[62096]: DEBUG oslo_vmware.api [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Task: {'id': task-397419, 'name': PowerOffVM_Task, 'duration_secs': 0.174127} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.603834] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Powered off the VM {{(pid=62096) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 864.604119] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 864.604858] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4652769-c34b-4baa-984f-b66554134de3 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.612342] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Unregistering the VM {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 864.612911] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f374c66efe1e406396c40822510d28da [ 864.613297] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-96196da5-ce43-4ab4-a899-9e748db9fe57 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.635885] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Unregistered the VM {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 864.636092] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Deleting contents of the VM from datastore datastore2 {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 864.636272] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Deleting the datastore file [datastore2] a7838568-9d47-4306-8bb6-9ad74ab1feb3 {{(pid=62096) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 864.636533] env[62096]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8ce2e6ff-b7b4-4bfc-afaf-9266d704401c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.643488] env[62096]: DEBUG oslo_vmware.api [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Waiting for the task: (returnval){ [ 864.643488] env[62096]: value = "task-397421" [ 864.643488] env[62096]: _type = "Task" [ 864.643488] env[62096]: } to complete. 
{{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.650907] env[62096]: DEBUG oslo_vmware.api [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Task: {'id': task-397421, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.749387] env[62096]: DEBUG oslo_concurrency.lockutils [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Releasing lock "refresh_cache-5c04dded-d1c9-44fe-bf2f-de295d21b725" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 864.749979] env[62096]: DEBUG nova.compute.manager [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 864.750162] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 864.750469] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-38fc670c-281a-484a-a6a1-a86b95ff8396 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.759173] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1b2b071-d3b3-4dd4-b011-a70cab179335 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.784527] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5c04dded-d1c9-44fe-bf2f-de295d21b725 could not be found. [ 864.784527] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 864.784527] env[62096]: INFO nova.compute.manager [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] Took 0.03 seconds to destroy the instance on the hypervisor. [ 864.784960] env[62096]: DEBUG oslo.service.loopingcall [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 864.786895] env[62096]: DEBUG nova.compute.manager [-] [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 864.787042] env[62096]: DEBUG nova.network.neutron [-] [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 864.803154] env[62096]: DEBUG nova.network.neutron [-] [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 864.803766] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 0f94d89ca60742fc9f36b1d9746e232d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 864.811104] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0f94d89ca60742fc9f36b1d9746e232d [ 864.819558] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Releasing lock "refresh_cache-fcf84c67-fdea-41d7-aed9-690a45c97eaa" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 864.819770] env[62096]: DEBUG nova.compute.manager [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 864.819947] env[62096]: DEBUG nova.compute.manager [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 864.820133] env[62096]: DEBUG nova.network.neutron [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 864.836320] env[62096]: DEBUG nova.network.neutron [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 864.836893] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Expecting reply to msg f4008028f755487bb9dd97272347b7a1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 864.843862] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f4008028f755487bb9dd97272347b7a1 [ 864.961697] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-747fd448-876a-409b-b935-014581cbbabf {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.969174] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35dba4e1-6de9-425a-9001-8623fa8c5b91 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.999558] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f87c505-09dd-42c9-aaf8-f2faf5533bdf {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.006740] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f568604-398a-4ce1-a1ca-15b3ed3f37af {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.019547] env[62096]: DEBUG nova.compute.provider_tree [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 865.020062] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg ecf7ce34f0d64a9db0a1633b36a82c52 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 865.026844] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ecf7ce34f0d64a9db0a1633b36a82c52 [ 865.100652] env[62096]: INFO nova.scheduler.client.report [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Deleted allocations for instance 012afef5-91e9-4fc1-af98-c17a3188ad45 [ 865.107128] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg aeb74d08a44a4e8c8d1c353a964514e8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 865.120605] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aeb74d08a44a4e8c8d1c353a964514e8 [ 865.153805] env[62096]: DEBUG 
oslo_vmware.api [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Task: {'id': task-397421, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.08368} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.154227] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Deleted the datastore file {{(pid=62096) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 865.154529] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Deleted contents of the VM from datastore datastore2 {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 865.154817] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 865.156584] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Expecting reply to msg a8fa212373cc4498b26db0214786b836 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 865.183249] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a8fa212373cc4498b26db0214786b836 [ 865.306052] env[62096]: DEBUG nova.network.neutron [-] [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 865.306816] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 579fe51bc32c4c1a9f1b26f2590a218f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 865.315324] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 579fe51bc32c4c1a9f1b26f2590a218f [ 865.341866] env[62096]: DEBUG nova.network.neutron [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 865.342492] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Expecting reply to msg fd77f197c2804f129ef5e8cd0ad69aa7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 865.350640] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fd77f197c2804f129ef5e8cd0ad69aa7 [ 865.539339] env[62096]: ERROR nova.scheduler.client.report [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [req-8afafaeb-4035-46f4-9d87-cdd460ca1c45] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 
0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 6eefe13c-ab55-4c03-987f-47a62756c3b3. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-8afafaeb-4035-46f4-9d87-cdd460ca1c45"}]} [ 865.555701] env[62096]: DEBUG nova.scheduler.client.report [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Refreshing inventories for resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 865.570132] env[62096]: DEBUG nova.scheduler.client.report [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Updating ProviderTree inventory for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 865.570358] env[62096]: DEBUG nova.compute.provider_tree [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 865.581414] env[62096]: DEBUG nova.scheduler.client.report [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Refreshing aggregate associations for resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3, aggregates: None {{(pid=62096) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 865.598612] env[62096]: DEBUG nova.scheduler.client.report [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Refreshing trait associations for resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62096) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 865.609008] env[62096]: DEBUG oslo_concurrency.lockutils [None req-265784be-d456-4382-9cd2-a751a291282e tempest-DeleteServersTestJSON-654204352 
tempest-DeleteServersTestJSON-654204352-project-member] Lock "012afef5-91e9-4fc1-af98-c17a3188ad45" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 136.662s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.609551] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Expecting reply to msg b5d818819a6b45c381758e27fb732c46 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 865.620055] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b5d818819a6b45c381758e27fb732c46 [ 865.661476] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Expecting reply to msg ba8a4503bd7843328e836359fc399dfa in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 865.689841] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ba8a4503bd7843328e836359fc399dfa [ 865.766577] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-981be55a-a990-4e5c-9ef9-6c54aec39c58 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.773693] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55f206b7-4a92-45de-bd44-090c41fbf776 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.803416] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3a1541e-063f-4c9a-8635-6148f1aabd15 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.809077] env[62096]: INFO nova.compute.manager [-] [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] Took 1.02 seconds to deallocate network for instance. 
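The 409 in the records above is Placement's resource-provider generation conflict (error code placement.concurrent_update): the inventory PUT carried a stale provider generation, so the scheduler report client refreshes the provider's inventories, aggregates and traits and retries, which is what the subsequent "Refreshing ..." records and the later "Updated inventory ... with generation 95" / "generation from 95 to 96" records show. Below is a minimal sketch of that refresh-and-retry pattern. It is not Nova's report client (that lives in nova/scheduler/client/report.py and handles auth, caching and the ProviderTree); the endpoint URL, token value and microversion header are illustrative assumptions.

import requests

# Illustrative values only; a real deployment discovers the placement
# endpoint from the service catalog and authenticates via keystone.
PLACEMENT_URL = "http://placement.example/placement"
HEADERS = {
    "X-Auth-Token": "<token>",                      # assumed auth token
    "OpenStack-API-Version": "placement 1.26",      # assumed microversion
}


def set_inventory(rp_uuid, inventories, max_attempts=3):
    """PUT an inventory, refreshing the provider generation after a 409."""
    for _ in range(max_attempts):
        # Re-read the provider to pick up its current generation.
        rp = requests.get(
            f"{PLACEMENT_URL}/resource_providers/{rp_uuid}", headers=HEADERS)
        rp.raise_for_status()
        body = {
            "resource_provider_generation": rp.json()["generation"],
            "inventories": inventories,
        }
        resp = requests.put(
            f"{PLACEMENT_URL}/resource_providers/{rp_uuid}/inventories",
            json=body, headers=HEADERS)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # 409 with code placement.concurrent_update: another writer bumped
        # the generation, as in the log record above; loop and retry.
    raise RuntimeError("giving up after repeated generation conflicts")


# Example call with the VCPU inventory reported in this log:
# set_inventory("6eefe13c-ab55-4c03-987f-47a62756c3b3",
#               {"VCPU": {"total": 48, "reserved": 0, "min_unit": 1,
#                         "max_unit": 16, "step_size": 1,
#                         "allocation_ratio": 4.0}})

Nova additionally caches the generation in its ProviderTree and refreshes aggregate and trait associations alongside the inventory, which is why those refresh calls appear in the log before the retried update succeeds and bumps the provider from generation 95 to 96.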
[ 865.812043] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b10bcd1-2825-4040-913c-6aa0aa8fd561 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.815521] env[62096]: DEBUG nova.compute.claims [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 865.815699] env[62096]: DEBUG oslo_concurrency.lockutils [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 865.825231] env[62096]: DEBUG nova.compute.provider_tree [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 865.825713] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 76d9b4b6d59740ab8fd74e5e9aaae10e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 865.833748] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 76d9b4b6d59740ab8fd74e5e9aaae10e [ 865.844273] env[62096]: INFO nova.compute.manager [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] [instance: fcf84c67-fdea-41d7-aed9-690a45c97eaa] Took 1.02 seconds to deallocate network for instance. [ 865.846022] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Expecting reply to msg a1999a8228154d8ea5bb031c7df2b669 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 865.880254] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a1999a8228154d8ea5bb031c7df2b669 [ 866.112174] env[62096]: DEBUG nova.compute.manager [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] Starting instance... 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 866.114069] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Expecting reply to msg 10ecfb50455a4aaeb171e594ea5a2379 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 866.152242] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 10ecfb50455a4aaeb171e594ea5a2379 [ 866.188097] env[62096]: DEBUG nova.virt.hardware [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 866.188368] env[62096]: DEBUG nova.virt.hardware [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 866.188520] env[62096]: DEBUG nova.virt.hardware [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 866.188698] env[62096]: DEBUG nova.virt.hardware [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 866.188841] env[62096]: DEBUG nova.virt.hardware [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 866.188985] env[62096]: DEBUG nova.virt.hardware [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 866.189195] env[62096]: DEBUG nova.virt.hardware [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 866.189350] env[62096]: DEBUG nova.virt.hardware [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 866.189514] env[62096]: DEBUG nova.virt.hardware [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 866.189673] env[62096]: DEBUG nova.virt.hardware [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 866.189840] env[62096]: DEBUG nova.virt.hardware [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 866.190976] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fda21ea-6300-47bf-9641-a213f817ae10 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.202083] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-127ca854-98a7-4d15-b6e3-6d90f243ddcb {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.219805] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Instance VIF info [] {{(pid=62096) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 866.225370] env[62096]: DEBUG oslo.service.loopingcall [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 866.225929] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Creating VM on the ESX host {{(pid=62096) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 866.226147] env[62096]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8a05a791-af58-4179-a4d0-cf0d416e6d6b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.243352] env[62096]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 866.243352] env[62096]: value = "task-397422" [ 866.243352] env[62096]: _type = "Task" [ 866.243352] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.250977] env[62096]: DEBUG oslo_vmware.api [-] Task: {'id': task-397422, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.352207] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Expecting reply to msg cc23d88d35c347299438c489fa78d879 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 866.356776] env[62096]: DEBUG nova.scheduler.client.report [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Updated inventory for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with generation 95 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:957}} [ 866.357034] env[62096]: DEBUG nova.compute.provider_tree [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Updating resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 generation from 95 to 96 during operation: update_inventory {{(pid=62096) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 866.357212] env[62096]: DEBUG nova.compute.provider_tree [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 866.359834] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 571de74ddea2496291cf27b7ed41fe30 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 866.374461] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 571de74ddea2496291cf27b7ed41fe30 [ 866.386408] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cc23d88d35c347299438c489fa78d879 [ 866.633960] env[62096]: DEBUG oslo_concurrency.lockutils [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 866.753302] env[62096]: DEBUG oslo_vmware.api [-] Task: {'id': task-397422, 'name': CreateVM_Task, 'duration_secs': 0.244803} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.753753] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Created VM on the ESX host {{(pid=62096) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 866.754353] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 866.754681] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Acquired lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.755181] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 866.755571] env[62096]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db074a55-4ed9-4574-bd5c-b9b3690ae394 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.759891] env[62096]: DEBUG oslo_vmware.api [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Waiting for the task: (returnval){ [ 866.759891] env[62096]: value = "session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]5274204b-af66-0ba3-1369-8f30b7089057" [ 866.759891] env[62096]: _type = "Task" [ 866.759891] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.767214] env[62096]: DEBUG oslo_vmware.api [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]5274204b-af66-0ba3-1369-8f30b7089057, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.862173] env[62096]: DEBUG oslo_concurrency.lockutils [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.144s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.862946] env[62096]: DEBUG nova.compute.manager [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: aa3e06b6-89f8-425f-8950-30deb025a473] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 866.864961] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg a9f6f229f8c1463caa0723582c9c0f6a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 866.866657] env[62096]: DEBUG oslo_concurrency.lockutils [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.131s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 866.868393] env[62096]: INFO nova.compute.claims [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 866.870099] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Expecting reply to msg 8d1b5d9d3258430eb2989e2af513db9e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 866.872881] env[62096]: INFO nova.scheduler.client.report [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Deleted allocations for instance fcf84c67-fdea-41d7-aed9-690a45c97eaa [ 866.882588] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Expecting reply to msg 801d2ff99d054d429e78f2e56e15235a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 866.903309] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 801d2ff99d054d429e78f2e56e15235a [ 866.912449] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8d1b5d9d3258430eb2989e2af513db9e [ 866.913105] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a9f6f229f8c1463caa0723582c9c0f6a [ 867.086246] env[62096]: DEBUG oslo_concurrency.lockutils [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Acquiring lock "0f275603-acfc-43db-8a71-a17af8e837b4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 867.086690] env[62096]: DEBUG oslo_concurrency.lockutils [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Lock "0f275603-acfc-43db-8a71-a17af8e837b4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 867.272248] env[62096]: DEBUG oslo_vmware.api [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 
tempest-ServerShowV254Test-1158686086-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]5274204b-af66-0ba3-1369-8f30b7089057, 'name': SearchDatastore_Task, 'duration_secs': 0.008796} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.272735] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Releasing lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 867.273110] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Processing image fb764baa-3805-45c4-a694-aa91b0932110 {{(pid=62096) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 867.273499] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 867.273805] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Acquired lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.274165] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62096) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 867.274549] env[62096]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-15d5ac8b-8669-43e1-a9c8-8846f26224cc {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.282366] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62096) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 867.282690] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62096) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 867.283493] env[62096]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28475757-194e-4fb5-ba48-ae56ea7ccd3c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.288344] env[62096]: DEBUG oslo_vmware.api [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Waiting for the task: (returnval){ [ 867.288344] env[62096]: value = "session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]5264d400-db7a-28dc-06b2-b1e5e588fe9a" [ 867.288344] env[62096]: _type = "Task" [ 867.288344] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.295959] env[62096]: DEBUG oslo_vmware.api [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]5264d400-db7a-28dc-06b2-b1e5e588fe9a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.374205] env[62096]: DEBUG nova.compute.utils [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 867.375082] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 99844f1e12804ce3a00226d6cdb83bdf in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 867.376176] env[62096]: DEBUG nova.compute.manager [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: aa3e06b6-89f8-425f-8950-30deb025a473] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 867.376547] env[62096]: DEBUG nova.network.neutron [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: aa3e06b6-89f8-425f-8950-30deb025a473] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 867.379781] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Expecting reply to msg dd45e962364947758eeead925a7c081b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 867.384695] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b7c0425f-be19-4b33-b788-3180cc28ca6d tempest-InstanceActionsV221TestJSON-713498315 tempest-InstanceActionsV221TestJSON-713498315-project-member] Lock "fcf84c67-fdea-41d7-aed9-690a45c97eaa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 129.110s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 867.385469] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Expecting reply to msg e4aa8e9fa3714bd4922d9eb598545bed in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 867.386477] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 99844f1e12804ce3a00226d6cdb83bdf [ 867.387082] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dd45e962364947758eeead925a7c081b [ 867.399541] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e4aa8e9fa3714bd4922d9eb598545bed [ 867.447359] env[62096]: DEBUG nova.policy [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '09a4673f033c4d139efe4cd9ba4b7560', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd81a5a37184d4b29ad6df7e77dfd3ee4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 867.800173] env[62096]: DEBUG oslo_vmware.api [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]5264d400-db7a-28dc-06b2-b1e5e588fe9a, 'name': SearchDatastore_Task, 'duration_secs': 0.007422} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.800173] env[62096]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e52de0a8-b445-4bc4-b1eb-79f0930d1dac {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.804987] env[62096]: DEBUG oslo_vmware.api [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Waiting for the task: (returnval){ [ 867.804987] env[62096]: value = "session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]52904b84-274f-a6e5-91a5-f8273e2fee19" [ 867.804987] env[62096]: _type = "Task" [ 867.804987] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.814897] env[62096]: DEBUG oslo_vmware.api [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]52904b84-274f-a6e5-91a5-f8273e2fee19, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.864578] env[62096]: DEBUG nova.network.neutron [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: aa3e06b6-89f8-425f-8950-30deb025a473] Successfully created port: c7c41772-9e80-4043-b62d-69cf4459ebc3 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 867.879416] env[62096]: DEBUG nova.compute.manager [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: aa3e06b6-89f8-425f-8950-30deb025a473] Start building block device mappings for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 867.881161] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 6f0c019731d944fd83bcac904f76b864 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 867.888586] env[62096]: DEBUG nova.compute.manager [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] Starting instance... 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 867.890283] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Expecting reply to msg be4620c543c745dfbd53f096329c55e1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 867.916940] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6f0c019731d944fd83bcac904f76b864 [ 867.929901] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg be4620c543c745dfbd53f096329c55e1 [ 868.102194] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c2292d2-74c0-491b-ae33-09db53622ce4 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.109817] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f15cbc68-5b8d-40c6-8dae-5edf39e8c113 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.141375] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3471d54e-6573-4a3f-9e8d-eb63072ea19e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.149736] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e218631-004a-4e94-8469-5940c615bcfe {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.163791] env[62096]: DEBUG nova.compute.provider_tree [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 868.164557] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Expecting reply to msg eeb998381baa41bab92eb6aa853b3667 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 868.174846] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eeb998381baa41bab92eb6aa853b3667 [ 868.316155] env[62096]: DEBUG oslo_vmware.api [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]52904b84-274f-a6e5-91a5-f8273e2fee19, 'name': SearchDatastore_Task, 'duration_secs': 0.00825} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.316614] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Releasing lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 868.317038] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk to [datastore2] a7838568-9d47-4306-8bb6-9ad74ab1feb3/a7838568-9d47-4306-8bb6-9ad74ab1feb3.vmdk {{(pid=62096) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 868.317452] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-71bcbbf2-f6f9-497a-9895-270ed74b3677 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.324701] env[62096]: DEBUG oslo_vmware.api [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Waiting for the task: (returnval){ [ 868.324701] env[62096]: value = "task-397423" [ 868.324701] env[62096]: _type = "Task" [ 868.324701] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.332858] env[62096]: DEBUG oslo_vmware.api [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Task: {'id': task-397423, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.388241] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 377cd8fa0bf047e3ac9ffd08fe665089 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 868.420873] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 868.429683] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 377cd8fa0bf047e3ac9ffd08fe665089 [ 868.529235] env[62096]: DEBUG nova.compute.manager [req-57f42cea-e897-4a28-ab76-1287d10c8e39 req-43ebcaba-9bec-4f26-8efc-0653f817c45b service nova] [instance: aa3e06b6-89f8-425f-8950-30deb025a473] Received event network-changed-c7c41772-9e80-4043-b62d-69cf4459ebc3 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 868.529810] env[62096]: DEBUG nova.compute.manager [req-57f42cea-e897-4a28-ab76-1287d10c8e39 req-43ebcaba-9bec-4f26-8efc-0653f817c45b service nova] [instance: aa3e06b6-89f8-425f-8950-30deb025a473] Refreshing instance network info cache due to event network-changed-c7c41772-9e80-4043-b62d-69cf4459ebc3. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 868.530501] env[62096]: DEBUG oslo_concurrency.lockutils [req-57f42cea-e897-4a28-ab76-1287d10c8e39 req-43ebcaba-9bec-4f26-8efc-0653f817c45b service nova] Acquiring lock "refresh_cache-aa3e06b6-89f8-425f-8950-30deb025a473" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 868.530803] env[62096]: DEBUG oslo_concurrency.lockutils [req-57f42cea-e897-4a28-ab76-1287d10c8e39 req-43ebcaba-9bec-4f26-8efc-0653f817c45b service nova] Acquired lock "refresh_cache-aa3e06b6-89f8-425f-8950-30deb025a473" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.531109] env[62096]: DEBUG nova.network.neutron [req-57f42cea-e897-4a28-ab76-1287d10c8e39 req-43ebcaba-9bec-4f26-8efc-0653f817c45b service nova] [instance: aa3e06b6-89f8-425f-8950-30deb025a473] Refreshing network info cache for port c7c41772-9e80-4043-b62d-69cf4459ebc3 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 868.532198] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-57f42cea-e897-4a28-ab76-1287d10c8e39 req-43ebcaba-9bec-4f26-8efc-0653f817c45b service nova] Expecting reply to msg 10de9a3f40244df0b0e5d6b0df59161b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 868.541788] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 10de9a3f40244df0b0e5d6b0df59161b [ 868.671677] env[62096]: DEBUG nova.scheduler.client.report [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 
'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 868.674269] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Expecting reply to msg 2fc4d3f5622f471eb9ed0d35cf833e57 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 868.701139] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2fc4d3f5622f471eb9ed0d35cf833e57 [ 868.723482] env[62096]: ERROR nova.compute.manager [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c7c41772-9e80-4043-b62d-69cf4459ebc3, please check neutron logs for more information. [ 868.723482] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 868.723482] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 868.723482] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 868.723482] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 868.723482] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 868.723482] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 868.723482] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 868.723482] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 868.723482] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 868.723482] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 868.723482] env[62096]: ERROR nova.compute.manager raise self.value [ 868.723482] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 868.723482] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 868.723482] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 868.723482] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 868.723940] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 868.723940] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 868.723940] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c7c41772-9e80-4043-b62d-69cf4459ebc3, please check neutron logs for more information. 
[ 868.723940] env[62096]: ERROR nova.compute.manager [ 868.723940] env[62096]: Traceback (most recent call last): [ 868.723940] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 868.723940] env[62096]: listener.cb(fileno) [ 868.723940] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 868.723940] env[62096]: result = function(*args, **kwargs) [ 868.723940] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 868.723940] env[62096]: return func(*args, **kwargs) [ 868.723940] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 868.723940] env[62096]: raise e [ 868.723940] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 868.723940] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 868.723940] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 868.723940] env[62096]: created_port_ids = self._update_ports_for_instance( [ 868.723940] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 868.723940] env[62096]: with excutils.save_and_reraise_exception(): [ 868.723940] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 868.723940] env[62096]: self.force_reraise() [ 868.723940] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 868.723940] env[62096]: raise self.value [ 868.723940] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 868.723940] env[62096]: updated_port = self._update_port( [ 868.723940] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 868.723940] env[62096]: _ensure_no_port_binding_failure(port) [ 868.723940] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 868.723940] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 868.724801] env[62096]: nova.exception.PortBindingFailed: Binding failed for port c7c41772-9e80-4043-b62d-69cf4459ebc3, please check neutron logs for more information. [ 868.724801] env[62096]: Removing descriptor: 14 [ 868.836057] env[62096]: DEBUG oslo_vmware.api [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Task: {'id': task-397423, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.434933} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.836438] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk to [datastore2] a7838568-9d47-4306-8bb6-9ad74ab1feb3/a7838568-9d47-4306-8bb6-9ad74ab1feb3.vmdk {{(pid=62096) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 868.836653] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Extending root virtual disk to 1048576 {{(pid=62096) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 868.837218] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bc508f0f-f98c-4674-a691-4e26b85b484f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.843947] env[62096]: DEBUG oslo_vmware.api [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Waiting for the task: (returnval){ [ 868.843947] env[62096]: value = "task-397424" [ 868.843947] env[62096]: _type = "Task" [ 868.843947] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.852114] env[62096]: DEBUG oslo_vmware.api [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Task: {'id': task-397424, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.892016] env[62096]: DEBUG nova.compute.manager [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: aa3e06b6-89f8-425f-8950-30deb025a473] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 868.918097] env[62096]: DEBUG nova.virt.hardware [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 868.919108] env[62096]: DEBUG nova.virt.hardware [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 868.919296] env[62096]: DEBUG nova.virt.hardware [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 868.919511] env[62096]: DEBUG nova.virt.hardware [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 868.919692] env[62096]: DEBUG nova.virt.hardware [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 868.919857] env[62096]: DEBUG nova.virt.hardware [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 868.920129] env[62096]: DEBUG nova.virt.hardware [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 868.920318] env[62096]: DEBUG nova.virt.hardware [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 868.920511] env[62096]: DEBUG nova.virt.hardware [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] 
Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 868.920702] env[62096]: DEBUG nova.virt.hardware [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 868.920903] env[62096]: DEBUG nova.virt.hardware [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 868.921988] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ffdcf32-7b84-46b8-972d-065c43c06cff {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.930029] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eba06f80-df66-4570-9ed1-d562fa7c6f1c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.946016] env[62096]: ERROR nova.compute.manager [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: aa3e06b6-89f8-425f-8950-30deb025a473] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c7c41772-9e80-4043-b62d-69cf4459ebc3, please check neutron logs for more information. [ 868.946016] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] Traceback (most recent call last): [ 868.946016] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 868.946016] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] yield resources [ 868.946016] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 868.946016] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] self.driver.spawn(context, instance, image_meta, [ 868.946016] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 868.946016] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] self._vmops.spawn(context, instance, image_meta, injected_files, [ 868.946016] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 868.946016] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] vm_ref = self.build_virtual_machine(instance, [ 868.946016] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 868.946377] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] vif_infos = vmwarevif.get_vif_info(self._session, [ 868.946377] env[62096]: ERROR 
nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 868.946377] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] for vif in network_info: [ 868.946377] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 868.946377] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] return self._sync_wrapper(fn, *args, **kwargs) [ 868.946377] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 868.946377] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] self.wait() [ 868.946377] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 868.946377] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] self[:] = self._gt.wait() [ 868.946377] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 868.946377] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] return self._exit_event.wait() [ 868.946377] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 868.946377] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] current.throw(*self._exc) [ 868.946712] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 868.946712] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] result = function(*args, **kwargs) [ 868.946712] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 868.946712] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] return func(*args, **kwargs) [ 868.946712] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 868.946712] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] raise e [ 868.946712] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 868.946712] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] nwinfo = self.network_api.allocate_for_instance( [ 868.946712] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 868.946712] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] created_port_ids = self._update_ports_for_instance( [ 868.946712] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] 
File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 868.946712] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] with excutils.save_and_reraise_exception(): [ 868.946712] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 868.947058] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] self.force_reraise() [ 868.947058] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 868.947058] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] raise self.value [ 868.947058] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 868.947058] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] updated_port = self._update_port( [ 868.947058] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 868.947058] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] _ensure_no_port_binding_failure(port) [ 868.947058] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 868.947058] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] raise exception.PortBindingFailed(port_id=port['id']) [ 868.947058] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] nova.exception.PortBindingFailed: Binding failed for port c7c41772-9e80-4043-b62d-69cf4459ebc3, please check neutron logs for more information. [ 868.947058] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] [ 868.947058] env[62096]: INFO nova.compute.manager [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: aa3e06b6-89f8-425f-8950-30deb025a473] Terminating instance [ 868.948473] env[62096]: DEBUG oslo_concurrency.lockutils [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Acquiring lock "refresh_cache-aa3e06b6-89f8-425f-8950-30deb025a473" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 869.052794] env[62096]: DEBUG nova.network.neutron [req-57f42cea-e897-4a28-ab76-1287d10c8e39 req-43ebcaba-9bec-4f26-8efc-0653f817c45b service nova] [instance: aa3e06b6-89f8-425f-8950-30deb025a473] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 869.128236] env[62096]: DEBUG nova.network.neutron [req-57f42cea-e897-4a28-ab76-1287d10c8e39 req-43ebcaba-9bec-4f26-8efc-0653f817c45b service nova] [instance: aa3e06b6-89f8-425f-8950-30deb025a473] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.128754] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-57f42cea-e897-4a28-ab76-1287d10c8e39 req-43ebcaba-9bec-4f26-8efc-0653f817c45b service nova] Expecting reply to msg 2997d6f6de8d4b1291d30c782efe424c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 869.139133] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2997d6f6de8d4b1291d30c782efe424c [ 869.181906] env[62096]: DEBUG oslo_concurrency.lockutils [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.315s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 869.182470] env[62096]: DEBUG nova.compute.manager [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Start building networks asynchronously for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 869.184426] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Expecting reply to msg 62056caa5fe34712b7a135b959e5685d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 869.186118] env[62096]: DEBUG oslo_concurrency.lockutils [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.161s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 869.188151] env[62096]: INFO nova.compute.claims [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 869.190239] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Expecting reply to msg b8401bcd420d482a9d959aca79623f92 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 869.225130] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 62056caa5fe34712b7a135b959e5685d [ 869.239476] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b8401bcd420d482a9d959aca79623f92 [ 869.353509] env[62096]: DEBUG oslo_vmware.api [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Task: {'id': task-397424, 'name': ExtendVirtualDisk_Task, 
'duration_secs': 0.060234} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.353818] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Extended root virtual disk {{(pid=62096) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 869.354653] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-532b5bee-25ea-48bf-9bc9-1dc6647f9a8d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.374360] env[62096]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Reconfiguring VM instance instance-00000045 to attach disk [datastore2] a7838568-9d47-4306-8bb6-9ad74ab1feb3/a7838568-9d47-4306-8bb6-9ad74ab1feb3.vmdk or device None with type sparse {{(pid=62096) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 869.374980] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bceaed3c-7ae2-4275-b014-000e6398d54c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.393520] env[62096]: DEBUG oslo_vmware.api [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Waiting for the task: (returnval){ [ 869.393520] env[62096]: value = "task-397425" [ 869.393520] env[62096]: _type = "Task" [ 869.393520] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.401225] env[62096]: DEBUG oslo_vmware.api [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Task: {'id': task-397425, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.633264] env[62096]: DEBUG oslo_concurrency.lockutils [req-57f42cea-e897-4a28-ab76-1287d10c8e39 req-43ebcaba-9bec-4f26-8efc-0653f817c45b service nova] Releasing lock "refresh_cache-aa3e06b6-89f8-425f-8950-30deb025a473" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.633264] env[62096]: DEBUG oslo_concurrency.lockutils [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Acquired lock "refresh_cache-aa3e06b6-89f8-425f-8950-30deb025a473" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.633264] env[62096]: DEBUG nova.network.neutron [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: aa3e06b6-89f8-425f-8950-30deb025a473] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 869.633264] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 0dc927c7861a4f288060c28244a514d5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 869.639915] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0dc927c7861a4f288060c28244a514d5 [ 869.687696] env[62096]: DEBUG nova.compute.utils [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 869.688362] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Expecting reply to msg 7c95b42bf3054925b8b35e36b5fc59b8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 869.689413] env[62096]: DEBUG nova.compute.manager [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Not allocating networking since 'none' was specified. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 869.699509] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Expecting reply to msg 37a41eced54c4af690b8d587126dd648 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 869.710388] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 37a41eced54c4af690b8d587126dd648 [ 869.710928] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c95b42bf3054925b8b35e36b5fc59b8 [ 869.904445] env[62096]: DEBUG oslo_vmware.api [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Task: {'id': task-397425, 'name': ReconfigVM_Task, 'duration_secs': 0.326594} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.904745] env[62096]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Reconfigured VM instance instance-00000045 to attach disk [datastore2] a7838568-9d47-4306-8bb6-9ad74ab1feb3/a7838568-9d47-4306-8bb6-9ad74ab1feb3.vmdk or device None with type sparse {{(pid=62096) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 869.905355] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-39d6a1a7-ae50-430e-8ef9-5c206f044559 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.912502] env[62096]: DEBUG oslo_vmware.api [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Waiting for the task: (returnval){ [ 869.912502] env[62096]: value = "task-397426" [ 869.912502] env[62096]: _type = "Task" [ 869.912502] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.920384] env[62096]: DEBUG oslo_vmware.api [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Task: {'id': task-397426, 'name': Rename_Task} progress is 5%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.150547] env[62096]: DEBUG nova.network.neutron [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: aa3e06b6-89f8-425f-8950-30deb025a473] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 870.196364] env[62096]: DEBUG nova.compute.manager [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 870.198012] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Expecting reply to msg aff37089209747809fef9682aad30bee in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 870.236450] env[62096]: DEBUG nova.network.neutron [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: aa3e06b6-89f8-425f-8950-30deb025a473] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.236947] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg e310e775a6cf4c0096147c9be8874403 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 870.238204] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aff37089209747809fef9682aad30bee [ 870.245780] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e310e775a6cf4c0096147c9be8874403 [ 870.384737] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f58c4df-05d1-47ba-ae36-e228e2265a86 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.392673] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-943ad09b-9666-4678-8f25-10b44d31f212 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.425500] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72d62c03-6390-4dae-9d60-450f0d520d5e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.432363] env[62096]: DEBUG oslo_vmware.api [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Task: {'id': task-397426, 'name': Rename_Task, 'duration_secs': 0.130451} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.434319] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Powering on the VM {{(pid=62096) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 870.434574] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-243a2ee0-1da3-4646-9068-b630f71f744e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.436837] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11c57ec8-de64-45de-8704-3d2b98adcec4 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.449511] env[62096]: DEBUG nova.compute.provider_tree [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 870.449974] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Expecting reply to msg 959fc7a0a3f94a3aac43bfa06d8ee790 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 870.452420] env[62096]: DEBUG oslo_vmware.api [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Waiting for the task: (returnval){ [ 870.452420] env[62096]: value = "task-397427" [ 870.452420] env[62096]: _type = "Task" [ 870.452420] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.457422] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 959fc7a0a3f94a3aac43bfa06d8ee790 [ 870.460861] env[62096]: DEBUG oslo_vmware.api [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Task: {'id': task-397427, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.549175] env[62096]: DEBUG nova.compute.manager [req-fba66720-4a07-40a5-b2d0-cbd3bfcccdd6 req-34d031ec-e5eb-4b7f-81d1-52af98820183 service nova] [instance: aa3e06b6-89f8-425f-8950-30deb025a473] Received event network-vif-deleted-c7c41772-9e80-4043-b62d-69cf4459ebc3 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 870.703522] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Expecting reply to msg e0b1579dd8dd48bbac5f6aecfdd9e9e5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 870.732199] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e0b1579dd8dd48bbac5f6aecfdd9e9e5 [ 870.739733] env[62096]: DEBUG oslo_concurrency.lockutils [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Releasing lock "refresh_cache-aa3e06b6-89f8-425f-8950-30deb025a473" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 870.740232] env[62096]: DEBUG nova.compute.manager [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: aa3e06b6-89f8-425f-8950-30deb025a473] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 870.740443] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: aa3e06b6-89f8-425f-8950-30deb025a473] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 870.740950] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-038d7495-1f2f-4ea4-a840-66a62976ebf9 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.750649] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a015195-3343-4771-b587-6027be511b48 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.772778] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: aa3e06b6-89f8-425f-8950-30deb025a473] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance aa3e06b6-89f8-425f-8950-30deb025a473 could not be found. [ 870.772991] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: aa3e06b6-89f8-425f-8950-30deb025a473] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 870.773169] env[62096]: INFO nova.compute.manager [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: aa3e06b6-89f8-425f-8950-30deb025a473] Took 0.03 seconds to destroy the instance on the hypervisor. 
[ 870.773408] env[62096]: DEBUG oslo.service.loopingcall [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 870.773628] env[62096]: DEBUG nova.compute.manager [-] [instance: aa3e06b6-89f8-425f-8950-30deb025a473] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 870.773720] env[62096]: DEBUG nova.network.neutron [-] [instance: aa3e06b6-89f8-425f-8950-30deb025a473] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 870.787611] env[62096]: DEBUG nova.network.neutron [-] [instance: aa3e06b6-89f8-425f-8950-30deb025a473] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 870.788102] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 49829d1a5ad543e085cb99f77e6a4e36 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 870.795198] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 49829d1a5ad543e085cb99f77e6a4e36 [ 870.964669] env[62096]: DEBUG oslo_vmware.api [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Task: {'id': task-397427, 'name': PowerOnVM_Task, 'duration_secs': 0.405822} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.964948] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Powered on the VM {{(pid=62096) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 870.965115] env[62096]: DEBUG nova.compute.manager [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Checking state {{(pid=62096) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 870.965825] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3964bd2b-93eb-4613-ba46-e9d74a1a43ff {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.970496] env[62096]: ERROR nova.scheduler.client.report [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] [req-79b587ae-c2c8-4efb-b0a4-54400a80037a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 6eefe13c-ab55-4c03-987f-47a62756c3b3. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-79b587ae-c2c8-4efb-b0a4-54400a80037a"}]} [ 870.975596] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Expecting reply to msg 952ef9fe99cd4b938e982781b837d4ad in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 870.987204] env[62096]: DEBUG nova.scheduler.client.report [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Refreshing inventories for resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 871.000368] env[62096]: DEBUG nova.scheduler.client.report [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Updating ProviderTree inventory for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 871.000581] env[62096]: DEBUG nova.compute.provider_tree [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 871.007098] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 952ef9fe99cd4b938e982781b837d4ad [ 871.012810] env[62096]: DEBUG nova.scheduler.client.report [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Refreshing aggregate associations for resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3, aggregates: None {{(pid=62096) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 871.031602] env[62096]: DEBUG nova.scheduler.client.report [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Refreshing trait associations for resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62096) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:825}} [ 871.197928] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fd59756-4b1f-4802-a631-2aafb61a98d3 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.205525] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75996f4e-2f43-450b-b8d4-a091a4b475d4 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.209320] env[62096]: DEBUG nova.compute.manager [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Start spawning the instance on the hypervisor. {{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 871.239359] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff267cf7-55a1-431d-b739-11e3107e0f51 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.246262] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-358997c5-fdd3-4ecd-b61c-f74f752e7786 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.251571] env[62096]: DEBUG nova.virt.hardware [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 871.251920] env[62096]: DEBUG nova.virt.hardware [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 871.252203] env[62096]: DEBUG nova.virt.hardware [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 871.252522] env[62096]: DEBUG nova.virt.hardware [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 871.252774] env[62096]: DEBUG nova.virt.hardware [None 
req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 871.253032] env[62096]: DEBUG nova.virt.hardware [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 871.253374] env[62096]: DEBUG nova.virt.hardware [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 871.253651] env[62096]: DEBUG nova.virt.hardware [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 871.253939] env[62096]: DEBUG nova.virt.hardware [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 871.254231] env[62096]: DEBUG nova.virt.hardware [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 871.254532] env[62096]: DEBUG nova.virt.hardware [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 871.255624] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88834005-57dc-4738-93fe-805a97f78e61 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.267316] env[62096]: DEBUG nova.compute.provider_tree [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 871.267804] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 
tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Expecting reply to msg 8a45900d77044cb7953f2981c391d27f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 871.271597] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e466b0a-9559-4505-bd12-0ab968dd5be6 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.275077] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8a45900d77044cb7953f2981c391d27f [ 871.285525] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Instance VIF info [] {{(pid=62096) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 871.291621] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Creating folder: Project (154853caca594b11a7b92e0226fc74d7). Parent ref: group-v107847. {{(pid=62096) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 871.292376] env[62096]: DEBUG nova.network.neutron [-] [instance: aa3e06b6-89f8-425f-8950-30deb025a473] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 871.292749] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 1fd0c122ee784dfaa22e602c80a26a12 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 871.293602] env[62096]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-548453e3-3d43-4e4e-8c2d-28e2fe30d30b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.302581] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1fd0c122ee784dfaa22e602c80a26a12 [ 871.302941] env[62096]: INFO nova.virt.vmwareapi.vm_util [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Created folder: Project (154853caca594b11a7b92e0226fc74d7) in parent group-v107847. [ 871.303116] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Creating folder: Instances. Parent ref: group-v107870. {{(pid=62096) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 871.303323] env[62096]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9f9254b7-968e-4983-9641-b0c8a75211b7 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.312385] env[62096]: INFO nova.virt.vmwareapi.vm_util [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Created folder: Instances in parent group-v107870. [ 871.312606] env[62096]: DEBUG oslo.service.loopingcall [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 871.312781] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Creating VM on the ESX host {{(pid=62096) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 871.312961] env[62096]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bea3332f-b832-4757-8421-63a8c3e461d3 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.328183] env[62096]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 871.328183] env[62096]: value = "task-397430" [ 871.328183] env[62096]: _type = "Task" [ 871.328183] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.337592] env[62096]: DEBUG oslo_vmware.api [-] Task: {'id': task-397430, 'name': CreateVM_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.485226] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 871.796080] env[62096]: INFO nova.compute.manager [-] [instance: aa3e06b6-89f8-425f-8950-30deb025a473] Took 1.02 seconds to deallocate network for instance. [ 871.800167] env[62096]: DEBUG nova.scheduler.client.report [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Updated inventory for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with generation 97 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:957}} [ 871.800167] env[62096]: DEBUG nova.compute.provider_tree [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Updating resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 generation from 97 to 98 during operation: update_inventory {{(pid=62096) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 871.800167] env[62096]: DEBUG nova.compute.provider_tree [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 871.802625] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Expecting reply to msg 48c45983dc484bddb217d8770861e0dd in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 871.803658] env[62096]: DEBUG nova.compute.claims [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: aa3e06b6-89f8-425f-8950-30deb025a473] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 871.803839] env[62096]: DEBUG oslo_concurrency.lockutils [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 871.816208] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 48c45983dc484bddb217d8770861e0dd [ 871.838391] env[62096]: DEBUG oslo_vmware.api [-] Task: {'id': task-397430, 'name': CreateVM_Task, 'duration_secs': 0.226425} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.838550] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Created VM on the ESX host {{(pid=62096) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 871.838943] env[62096]: DEBUG oslo_concurrency.lockutils [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 871.839101] env[62096]: DEBUG oslo_concurrency.lockutils [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Acquired lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.839411] env[62096]: DEBUG oslo_concurrency.lockutils [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 871.839647] env[62096]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c769a0dd-b167-4236-a4cd-fc35578a5247 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.843635] env[62096]: DEBUG oslo_vmware.api [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Waiting for the task: (returnval){ [ 871.843635] env[62096]: value = 
"session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]5278852b-cc0b-b06b-be66-78f30d1868cd" [ 871.843635] env[62096]: _type = "Task" [ 871.843635] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.852425] env[62096]: DEBUG oslo_vmware.api [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]5278852b-cc0b-b06b-be66-78f30d1868cd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.916471] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-787cee2b-5948-46d6-9396-dac712351d47 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Expecting reply to msg d811b8ab7ee74dbeaf6ab42ffd504092 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 871.925804] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d811b8ab7ee74dbeaf6ab42ffd504092 [ 872.305539] env[62096]: DEBUG oslo_concurrency.lockutils [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.119s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 872.306090] env[62096]: DEBUG nova.compute.manager [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 872.307982] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Expecting reply to msg 2de61ddd38f640f5b07d458f5f0b59a1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 872.309076] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.739s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 872.310701] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Expecting reply to msg ac970ca37c4b47a88e3864e8b83e583f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 872.343224] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2de61ddd38f640f5b07d458f5f0b59a1 [ 872.344037] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac970ca37c4b47a88e3864e8b83e583f [ 872.354460] env[62096]: DEBUG oslo_vmware.api [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]5278852b-cc0b-b06b-be66-78f30d1868cd, 'name': SearchDatastore_Task, 'duration_secs': 0.008577} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.354730] env[62096]: DEBUG oslo_concurrency.lockutils [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Releasing lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 872.354954] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Processing image fb764baa-3805-45c4-a694-aa91b0932110 {{(pid=62096) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 872.355177] env[62096]: DEBUG oslo_concurrency.lockutils [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 872.355321] env[62096]: DEBUG oslo_concurrency.lockutils [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Acquired lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 872.355491] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62096) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 872.355733] env[62096]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-19c39706-a83c-40be-aab1-6822dda09d36 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.363164] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62096) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 872.363331] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62096) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 872.363998] env[62096]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b48a2587-c572-41ef-80ad-64edbdb647d3 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.368703] env[62096]: DEBUG oslo_vmware.api [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Waiting for the task: (returnval){ [ 872.368703] env[62096]: value = "session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]52aa09f7-a96f-0ae5-d362-9f8f24e8c314" [ 872.368703] env[62096]: _type = "Task" [ 872.368703] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.375518] env[62096]: DEBUG oslo_vmware.api [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]52aa09f7-a96f-0ae5-d362-9f8f24e8c314, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.419269] env[62096]: DEBUG oslo_concurrency.lockutils [None req-787cee2b-5948-46d6-9396-dac712351d47 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Acquiring lock "a7838568-9d47-4306-8bb6-9ad74ab1feb3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 872.419269] env[62096]: DEBUG oslo_concurrency.lockutils [None req-787cee2b-5948-46d6-9396-dac712351d47 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Lock "a7838568-9d47-4306-8bb6-9ad74ab1feb3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 872.419269] env[62096]: DEBUG oslo_concurrency.lockutils [None req-787cee2b-5948-46d6-9396-dac712351d47 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Acquiring lock "a7838568-9d47-4306-8bb6-9ad74ab1feb3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 872.419439] env[62096]: DEBUG oslo_concurrency.lockutils [None req-787cee2b-5948-46d6-9396-dac712351d47 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Lock "a7838568-9d47-4306-8bb6-9ad74ab1feb3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 872.419558] env[62096]: DEBUG oslo_concurrency.lockutils [None req-787cee2b-5948-46d6-9396-dac712351d47 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Lock "a7838568-9d47-4306-8bb6-9ad74ab1feb3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62096) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 872.422393] env[62096]: INFO nova.compute.manager [None req-787cee2b-5948-46d6-9396-dac712351d47 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Terminating instance [ 872.424203] env[62096]: DEBUG oslo_concurrency.lockutils [None req-787cee2b-5948-46d6-9396-dac712351d47 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Acquiring lock "refresh_cache-a7838568-9d47-4306-8bb6-9ad74ab1feb3" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 872.424361] env[62096]: DEBUG oslo_concurrency.lockutils [None req-787cee2b-5948-46d6-9396-dac712351d47 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Acquired lock "refresh_cache-a7838568-9d47-4306-8bb6-9ad74ab1feb3" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 872.424525] env[62096]: DEBUG nova.network.neutron [None req-787cee2b-5948-46d6-9396-dac712351d47 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 872.425257] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-787cee2b-5948-46d6-9396-dac712351d47 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Expecting reply to msg 78c92b1a17264569b5c4ae91ff0663f8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 872.431447] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 78c92b1a17264569b5c4ae91ff0663f8 [ 872.816564] env[62096]: DEBUG nova.compute.utils [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 872.817180] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Expecting reply to msg d833e08662a3409fb527fa432122328f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 872.818294] env[62096]: DEBUG nova.compute.manager [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 872.818478] env[62096]: DEBUG nova.network.neutron [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 872.827684] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d833e08662a3409fb527fa432122328f [ 872.873655] env[62096]: DEBUG nova.policy [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e456597e1b8f4a61a423742b038be803', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '82533204d0f84d2690c292fc5c1d7d7f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 872.882906] env[62096]: DEBUG oslo_vmware.api [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]52aa09f7-a96f-0ae5-d362-9f8f24e8c314, 'name': SearchDatastore_Task, 'duration_secs': 0.007361} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.883698] env[62096]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eac5e072-6688-4729-8730-ae70e870c2bf {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.891675] env[62096]: DEBUG oslo_vmware.api [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Waiting for the task: (returnval){ [ 872.891675] env[62096]: value = "session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]5244e937-6ed8-79d5-7212-ee32171046e5" [ 872.891675] env[62096]: _type = "Task" [ 872.891675] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.905472] env[62096]: DEBUG oslo_vmware.api [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]5244e937-6ed8-79d5-7212-ee32171046e5, 'name': SearchDatastore_Task, 'duration_secs': 0.008775} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.905721] env[62096]: DEBUG oslo_concurrency.lockutils [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Releasing lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 872.905966] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk to [datastore2] 5627f913-29d2-476e-8fde-8ea457cc56f2/5627f913-29d2-476e-8fde-8ea457cc56f2.vmdk {{(pid=62096) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 872.906213] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5bdaace1-85c4-4dc5-8354-723be5ae6470 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.912261] env[62096]: DEBUG oslo_vmware.api [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Waiting for the task: (returnval){ [ 872.912261] env[62096]: value = "task-397431" [ 872.912261] env[62096]: _type = "Task" [ 872.912261] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.920331] env[62096]: DEBUG oslo_vmware.api [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Task: {'id': task-397431, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.944177] env[62096]: DEBUG nova.network.neutron [None req-787cee2b-5948-46d6-9396-dac712351d47 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 873.016501] env[62096]: DEBUG nova.network.neutron [None req-787cee2b-5948-46d6-9396-dac712351d47 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 873.017349] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-787cee2b-5948-46d6-9396-dac712351d47 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Expecting reply to msg 638fdd6f5a5447ccbbe1951533d12ba1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 873.026823] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 638fdd6f5a5447ccbbe1951533d12ba1 [ 873.055682] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daa319fb-5f25-4dbf-bd0d-af94b130fc94 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.064292] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0404fe8-8583-4616-963a-37ab2fcd6b85 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.094657] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17c87de6-6d1d-4e89-b417-e727e7ad9299 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.103132] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6e9c065-7958-448e-a68f-9cc91b8c7d13 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.118235] env[62096]: DEBUG nova.compute.provider_tree [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 873.118777] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Expecting reply to msg 58b3002a2f6741fe948d1c7a7339009b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 873.128621] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 58b3002a2f6741fe948d1c7a7339009b [ 873.214059] env[62096]: DEBUG nova.network.neutron [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] Successfully created port: 4b8eb4ba-d577-4e7a-8576-502e273b223b {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 873.321889] env[62096]: DEBUG nova.compute.manager [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 873.324109] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Expecting reply to msg 27bb6324e74141968ef3ff63fff4d906 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 873.358639] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 27bb6324e74141968ef3ff63fff4d906 [ 873.422958] env[62096]: DEBUG oslo_vmware.api [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Task: {'id': task-397431, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.428518} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.423429] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk to [datastore2] 5627f913-29d2-476e-8fde-8ea457cc56f2/5627f913-29d2-476e-8fde-8ea457cc56f2.vmdk {{(pid=62096) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 873.423833] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Extending root virtual disk to 1048576 {{(pid=62096) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 873.424557] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b5d98ece-ac97-4d8e-be2b-ce63993d835f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.431157] env[62096]: DEBUG oslo_vmware.api [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Waiting for the task: (returnval){ [ 873.431157] env[62096]: value = "task-397432" [ 873.431157] env[62096]: _type = "Task" [ 873.431157] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.441706] env[62096]: DEBUG oslo_vmware.api [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Task: {'id': task-397432, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.520135] env[62096]: DEBUG oslo_concurrency.lockutils [None req-787cee2b-5948-46d6-9396-dac712351d47 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Releasing lock "refresh_cache-a7838568-9d47-4306-8bb6-9ad74ab1feb3" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 873.520795] env[62096]: DEBUG nova.compute.manager [None req-787cee2b-5948-46d6-9396-dac712351d47 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 873.521111] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-787cee2b-5948-46d6-9396-dac712351d47 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 873.522145] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f670fe7c-5792-4821-968a-6a5d9dff3233 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.531192] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-787cee2b-5948-46d6-9396-dac712351d47 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Powering off the VM {{(pid=62096) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 873.531545] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c320c9b2-ed7d-4ef3-b8a6-77cc87c90a28 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.537915] env[62096]: DEBUG oslo_vmware.api [None req-787cee2b-5948-46d6-9396-dac712351d47 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Waiting for the task: (returnval){ [ 873.537915] env[62096]: value = "task-397433" [ 873.537915] env[62096]: _type = "Task" [ 873.537915] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.546202] env[62096]: DEBUG oslo_vmware.api [None req-787cee2b-5948-46d6-9396-dac712351d47 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Task: {'id': task-397433, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.621466] env[62096]: DEBUG nova.scheduler.client.report [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 873.624245] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Expecting reply to msg f4869e48b3ce4ced8e58ac368bef9781 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 873.637736] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f4869e48b3ce4ced8e58ac368bef9781 [ 873.841231] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Expecting reply to msg 9a53a306528a421d999d013f487f9a98 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 873.870435] env[62096]: DEBUG nova.compute.manager [req-5a6e85c2-dcef-43f9-b586-a6049c21d63f req-f9b4bb22-f68f-4815-b1a3-96993e060043 service nova] [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] Received event network-changed-4b8eb4ba-d577-4e7a-8576-502e273b223b {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 873.870435] env[62096]: DEBUG nova.compute.manager [req-5a6e85c2-dcef-43f9-b586-a6049c21d63f req-f9b4bb22-f68f-4815-b1a3-96993e060043 service nova] [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] Refreshing instance network info cache due to event network-changed-4b8eb4ba-d577-4e7a-8576-502e273b223b. 
{{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 873.870644] env[62096]: DEBUG oslo_concurrency.lockutils [req-5a6e85c2-dcef-43f9-b586-a6049c21d63f req-f9b4bb22-f68f-4815-b1a3-96993e060043 service nova] Acquiring lock "refresh_cache-9b85f3a6-892f-4eb4-b0d8-aacaf6576c78" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 873.870790] env[62096]: DEBUG oslo_concurrency.lockutils [req-5a6e85c2-dcef-43f9-b586-a6049c21d63f req-f9b4bb22-f68f-4815-b1a3-96993e060043 service nova] Acquired lock "refresh_cache-9b85f3a6-892f-4eb4-b0d8-aacaf6576c78" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.870949] env[62096]: DEBUG nova.network.neutron [req-5a6e85c2-dcef-43f9-b586-a6049c21d63f req-f9b4bb22-f68f-4815-b1a3-96993e060043 service nova] [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] Refreshing network info cache for port 4b8eb4ba-d577-4e7a-8576-502e273b223b {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 873.871400] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-5a6e85c2-dcef-43f9-b586-a6049c21d63f req-f9b4bb22-f68f-4815-b1a3-96993e060043 service nova] Expecting reply to msg 0c30e336c6fb42659fcc619dc9a90dd9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 873.878237] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c30e336c6fb42659fcc619dc9a90dd9 [ 873.886789] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9a53a306528a421d999d013f487f9a98 [ 873.942174] env[62096]: DEBUG oslo_vmware.api [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Task: {'id': task-397432, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.058271} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.942584] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Extended root virtual disk {{(pid=62096) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 873.943521] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e55d7a45-4582-4122-9d45-0ec23930e63d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.962720] env[62096]: DEBUG nova.virt.vmwareapi.volumeops [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Reconfiguring VM instance instance-00000048 to attach disk [datastore2] 5627f913-29d2-476e-8fde-8ea457cc56f2/5627f913-29d2-476e-8fde-8ea457cc56f2.vmdk or device None with type sparse {{(pid=62096) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 873.963178] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-be051067-9684-4b36-80b1-f642afe67ec7 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.983869] env[62096]: DEBUG oslo_vmware.api [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Waiting for the task: (returnval){ [ 873.983869] env[62096]: value = "task-397434" [ 873.983869] env[62096]: _type = "Task" [ 873.983869] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.991846] env[62096]: DEBUG oslo_vmware.api [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Task: {'id': task-397434, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.036970] env[62096]: ERROR nova.compute.manager [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 4b8eb4ba-d577-4e7a-8576-502e273b223b, please check neutron logs for more information. 
[ 874.036970] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 874.036970] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 874.036970] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 874.036970] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 874.036970] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 874.036970] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 874.036970] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 874.036970] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 874.036970] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 874.036970] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 874.036970] env[62096]: ERROR nova.compute.manager raise self.value [ 874.036970] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 874.036970] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 874.036970] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 874.036970] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 874.037495] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 874.037495] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 874.037495] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 4b8eb4ba-d577-4e7a-8576-502e273b223b, please check neutron logs for more information. 
[ 874.037495] env[62096]: ERROR nova.compute.manager [ 874.037825] env[62096]: Traceback (most recent call last): [ 874.037920] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 874.037920] env[62096]: listener.cb(fileno) [ 874.037991] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 874.037991] env[62096]: result = function(*args, **kwargs) [ 874.038072] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 874.038072] env[62096]: return func(*args, **kwargs) [ 874.038136] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 874.038136] env[62096]: raise e [ 874.038204] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 874.038204] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 874.038274] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 874.038274] env[62096]: created_port_ids = self._update_ports_for_instance( [ 874.038342] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 874.038342] env[62096]: with excutils.save_and_reraise_exception(): [ 874.038422] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 874.038422] env[62096]: self.force_reraise() [ 874.038532] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 874.038532] env[62096]: raise self.value [ 874.038623] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 874.038623] env[62096]: updated_port = self._update_port( [ 874.038693] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 874.038693] env[62096]: _ensure_no_port_binding_failure(port) [ 874.038759] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 874.038759] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 874.038839] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 4b8eb4ba-d577-4e7a-8576-502e273b223b, please check neutron logs for more information. [ 874.038894] env[62096]: Removing descriptor: 14 [ 874.047662] env[62096]: DEBUG oslo_vmware.api [None req-787cee2b-5948-46d6-9396-dac712351d47 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Task: {'id': task-397433, 'name': PowerOffVM_Task, 'duration_secs': 0.117072} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.048158] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-787cee2b-5948-46d6-9396-dac712351d47 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Powered off the VM {{(pid=62096) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 874.048454] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-787cee2b-5948-46d6-9396-dac712351d47 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Unregistering the VM {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 874.048794] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c8f361b8-9005-4856-8d80-1deda44934cb {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.076794] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-787cee2b-5948-46d6-9396-dac712351d47 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Unregistered the VM {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 874.077204] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-787cee2b-5948-46d6-9396-dac712351d47 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Deleting contents of the VM from datastore datastore2 {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 874.077513] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-787cee2b-5948-46d6-9396-dac712351d47 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Deleting the datastore file [datastore2] a7838568-9d47-4306-8bb6-9ad74ab1feb3 {{(pid=62096) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 874.077900] env[62096]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-861404d3-5eb6-441a-8e36-61bf2f8595a6 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.084430] env[62096]: DEBUG oslo_vmware.api [None req-787cee2b-5948-46d6-9396-dac712351d47 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Waiting for the task: (returnval){ [ 874.084430] env[62096]: value = "task-397436" [ 874.084430] env[62096]: _type = "Task" [ 874.084430] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.098018] env[62096]: DEBUG oslo_vmware.api [None req-787cee2b-5948-46d6-9396-dac712351d47 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Task: {'id': task-397436, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.136025] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.818s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 874.136025] env[62096]: ERROR nova.compute.manager [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 58359077-282b-4662-a530-b190af2de477, please check neutron logs for more information. [ 874.136025] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] Traceback (most recent call last): [ 874.136025] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 874.136025] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] self.driver.spawn(context, instance, image_meta, [ 874.136025] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 874.136025] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 874.136025] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 874.136025] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] vm_ref = self.build_virtual_machine(instance, [ 874.136414] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 874.136414] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] vif_infos = vmwarevif.get_vif_info(self._session, [ 874.136414] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 874.136414] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] for vif in network_info: [ 874.136414] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 874.136414] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] return self._sync_wrapper(fn, *args, **kwargs) [ 874.136414] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 874.136414] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] self.wait() [ 874.136414] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 874.136414] env[62096]: 
ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] self[:] = self._gt.wait() [ 874.136414] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 874.136414] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] return self._exit_event.wait() [ 874.136414] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 874.136734] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] current.throw(*self._exc) [ 874.136734] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 874.136734] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] result = function(*args, **kwargs) [ 874.136734] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 874.136734] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] return func(*args, **kwargs) [ 874.136734] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 874.136734] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] raise e [ 874.136734] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 874.136734] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] nwinfo = self.network_api.allocate_for_instance( [ 874.136734] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 874.136734] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] created_port_ids = self._update_ports_for_instance( [ 874.136734] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 874.136734] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] with excutils.save_and_reraise_exception(): [ 874.137043] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 874.137043] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] self.force_reraise() [ 874.137043] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 874.137043] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] raise self.value [ 874.137043] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in 
_update_ports_for_instance [ 874.137043] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] updated_port = self._update_port( [ 874.137043] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 874.137043] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] _ensure_no_port_binding_failure(port) [ 874.137043] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 874.137043] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] raise exception.PortBindingFailed(port_id=port['id']) [ 874.137043] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] nova.exception.PortBindingFailed: Binding failed for port 58359077-282b-4662-a530-b190af2de477, please check neutron logs for more information. [ 874.137043] env[62096]: ERROR nova.compute.manager [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] [ 874.137331] env[62096]: DEBUG nova.compute.utils [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] Binding failed for port 58359077-282b-4662-a530-b190af2de477, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 874.137331] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 15.765s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 874.137331] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 874.137331] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62096) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 874.137331] env[62096]: DEBUG oslo_concurrency.lockutils [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 14.952s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 874.137470] env[62096]: DEBUG nova.objects.instance [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] [instance: 250ef7e7-266b-451d-8627-9cce211d4e83] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62096) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 874.137470] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None 
req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Expecting reply to msg 92762e2efce449b5ae6c1a3b41b5699a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 874.137470] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b11f487-a4bb-4b1c-9a70-5ef51c51a6b2 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.138735] env[62096]: DEBUG nova.compute.manager [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] Build of instance 6cc2a2c1-613d-40bd-a375-424b84b66ac9 was re-scheduled: Binding failed for port 58359077-282b-4662-a530-b190af2de477, please check neutron logs for more information. {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 874.139332] env[62096]: DEBUG nova.compute.manager [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 874.139661] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Acquiring lock "refresh_cache-6cc2a2c1-613d-40bd-a375-424b84b66ac9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 874.139915] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Acquired lock "refresh_cache-6cc2a2c1-613d-40bd-a375-424b84b66ac9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.140194] env[62096]: DEBUG nova.network.neutron [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 874.140808] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Expecting reply to msg 7a8205650a344ce8b49f0ac2eae58df7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 874.147795] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7a8205650a344ce8b49f0ac2eae58df7 [ 874.151792] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85237d45-5e3f-4302-b165-ff4b6f2d3336 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.168505] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72699683-1427-40c8-bb59-53495394dbbc {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.175515] env[62096]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f6afa18-b191-4376-a150-e36d8ce3187d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.182168] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 92762e2efce449b5ae6c1a3b41b5699a [ 874.210088] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181779MB free_disk=126GB free_vcpus=48 pci_devices=None {{(pid=62096) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 874.210430] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 874.349665] env[62096]: DEBUG nova.compute.manager [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] Start spawning the instance on the hypervisor. {{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 874.376189] env[62096]: DEBUG nova.virt.hardware [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 874.376674] env[62096]: DEBUG nova.virt.hardware [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 874.376965] env[62096]: DEBUG nova.virt.hardware [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 874.377264] env[62096]: DEBUG nova.virt.hardware [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 874.377517] env[62096]: DEBUG 
nova.virt.hardware [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 874.377871] env[62096]: DEBUG nova.virt.hardware [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 874.378191] env[62096]: DEBUG nova.virt.hardware [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 874.378464] env[62096]: DEBUG nova.virt.hardware [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 874.378742] env[62096]: DEBUG nova.virt.hardware [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 874.379014] env[62096]: DEBUG nova.virt.hardware [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 874.379309] env[62096]: DEBUG nova.virt.hardware [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 874.382293] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84fd3a4d-8698-4c18-bc97-7c7b8dc5f03b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.390715] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-161c4975-42ad-448b-82f2-a5756d2edc48 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.404859] env[62096]: ERROR nova.compute.manager [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 4b8eb4ba-d577-4e7a-8576-502e273b223b, please check neutron logs for more information. 
[ 874.404859] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] Traceback (most recent call last): [ 874.404859] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 874.404859] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] yield resources [ 874.404859] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 874.404859] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] self.driver.spawn(context, instance, image_meta, [ 874.404859] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 874.404859] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] self._vmops.spawn(context, instance, image_meta, injected_files, [ 874.404859] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 874.404859] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] vm_ref = self.build_virtual_machine(instance, [ 874.404859] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 874.405303] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] vif_infos = vmwarevif.get_vif_info(self._session, [ 874.405303] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 874.405303] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] for vif in network_info: [ 874.405303] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 874.405303] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] return self._sync_wrapper(fn, *args, **kwargs) [ 874.405303] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 874.405303] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] self.wait() [ 874.405303] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 874.405303] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] self[:] = self._gt.wait() [ 874.405303] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 874.405303] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] return self._exit_event.wait() [ 874.405303] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 874.405303] env[62096]: ERROR 
nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] current.throw(*self._exc) [ 874.405741] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 874.405741] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] result = function(*args, **kwargs) [ 874.405741] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 874.405741] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] return func(*args, **kwargs) [ 874.405741] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 874.405741] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] raise e [ 874.405741] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 874.405741] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] nwinfo = self.network_api.allocate_for_instance( [ 874.405741] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 874.405741] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] created_port_ids = self._update_ports_for_instance( [ 874.405741] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 874.405741] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] with excutils.save_and_reraise_exception(): [ 874.405741] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 874.406134] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] self.force_reraise() [ 874.406134] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 874.406134] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] raise self.value [ 874.406134] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 874.406134] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] updated_port = self._update_port( [ 874.406134] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 874.406134] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] _ensure_no_port_binding_failure(port) [ 874.406134] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
874.406134] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] raise exception.PortBindingFailed(port_id=port['id']) [ 874.406134] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] nova.exception.PortBindingFailed: Binding failed for port 4b8eb4ba-d577-4e7a-8576-502e273b223b, please check neutron logs for more information. [ 874.406134] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] [ 874.406801] env[62096]: INFO nova.compute.manager [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] Terminating instance [ 874.409211] env[62096]: DEBUG oslo_concurrency.lockutils [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Acquiring lock "refresh_cache-9b85f3a6-892f-4eb4-b0d8-aacaf6576c78" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 874.410122] env[62096]: DEBUG nova.network.neutron [req-5a6e85c2-dcef-43f9-b586-a6049c21d63f req-f9b4bb22-f68f-4815-b1a3-96993e060043 service nova] [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 874.485945] env[62096]: DEBUG nova.network.neutron [req-5a6e85c2-dcef-43f9-b586-a6049c21d63f req-f9b4bb22-f68f-4815-b1a3-96993e060043 service nova] [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 874.486639] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-5a6e85c2-dcef-43f9-b586-a6049c21d63f req-f9b4bb22-f68f-4815-b1a3-96993e060043 service nova] Expecting reply to msg c6383c0bc830421997bc9ab8e5d1bcf7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 874.496831] env[62096]: DEBUG oslo_vmware.api [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Task: {'id': task-397434, 'name': ReconfigVM_Task, 'duration_secs': 0.453272} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.497573] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c6383c0bc830421997bc9ab8e5d1bcf7 [ 874.498113] env[62096]: DEBUG nova.virt.vmwareapi.volumeops [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Reconfigured VM instance instance-00000048 to attach disk [datastore2] 5627f913-29d2-476e-8fde-8ea457cc56f2/5627f913-29d2-476e-8fde-8ea457cc56f2.vmdk or device None with type sparse {{(pid=62096) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 874.498823] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4bc5e767-1b59-413d-a880-a96e03c70bb0 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.504648] env[62096]: DEBUG oslo_vmware.api [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Waiting for the task: (returnval){ [ 874.504648] env[62096]: value = "task-397437" [ 874.504648] env[62096]: _type = "Task" [ 874.504648] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.512517] env[62096]: DEBUG oslo_vmware.api [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Task: {'id': task-397437, 'name': Rename_Task} progress is 5%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.594030] env[62096]: DEBUG oslo_vmware.api [None req-787cee2b-5948-46d6-9396-dac712351d47 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Task: {'id': task-397436, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.12264} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.594461] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-787cee2b-5948-46d6-9396-dac712351d47 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Deleted the datastore file {{(pid=62096) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 874.594776] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-787cee2b-5948-46d6-9396-dac712351d47 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Deleted contents of the VM from datastore datastore2 {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 874.595056] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-787cee2b-5948-46d6-9396-dac712351d47 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 874.595357] env[62096]: INFO nova.compute.manager [None req-787cee2b-5948-46d6-9396-dac712351d47 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Took 1.07 seconds to destroy the instance on the hypervisor. 
[ 874.595693] env[62096]: DEBUG oslo.service.loopingcall [None req-787cee2b-5948-46d6-9396-dac712351d47 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 874.595983] env[62096]: DEBUG nova.compute.manager [-] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 874.596205] env[62096]: DEBUG nova.network.neutron [-] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 874.610663] env[62096]: DEBUG nova.network.neutron [-] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 874.611312] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg ddec45d25d7d47838344971ee5c24f3e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 874.618672] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ddec45d25d7d47838344971ee5c24f3e [ 874.647226] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Expecting reply to msg 769f3ae7b0c94bc581f3a2a8194046e3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 874.656589] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 769f3ae7b0c94bc581f3a2a8194046e3 [ 874.663164] env[62096]: DEBUG nova.network.neutron [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 874.737573] env[62096]: DEBUG nova.network.neutron [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 874.737573] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Expecting reply to msg f9c9e3b1d8904e1ca337fd34148cffe4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 874.745441] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f9c9e3b1d8904e1ca337fd34148cffe4 [ 874.989763] env[62096]: DEBUG oslo_concurrency.lockutils [req-5a6e85c2-dcef-43f9-b586-a6049c21d63f req-f9b4bb22-f68f-4815-b1a3-96993e060043 service nova] Releasing lock "refresh_cache-9b85f3a6-892f-4eb4-b0d8-aacaf6576c78" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 874.990430] env[62096]: DEBUG oslo_concurrency.lockutils [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Acquired lock "refresh_cache-9b85f3a6-892f-4eb4-b0d8-aacaf6576c78" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.990776] env[62096]: DEBUG nova.network.neutron [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 874.991330] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Expecting reply to msg 9b611534e3bc4082a7cbf866b9196bb9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 874.998187] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9b611534e3bc4082a7cbf866b9196bb9 [ 875.013795] env[62096]: DEBUG oslo_vmware.api [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Task: {'id': task-397437, 'name': Rename_Task, 'duration_secs': 0.338924} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.014201] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Powering on the VM {{(pid=62096) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 875.014552] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-08f423ad-cc99-4e49-9a4a-f407a90c1047 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.019731] env[62096]: DEBUG oslo_vmware.api [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Waiting for the task: (returnval){ [ 875.019731] env[62096]: value = "task-397438" [ 875.019731] env[62096]: _type = "Task" [ 875.019731] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.026648] env[62096]: DEBUG oslo_vmware.api [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Task: {'id': task-397438, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.114021] env[62096]: DEBUG nova.network.neutron [-] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 875.114759] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg ebf555f1a3f749a1819771655a9ee104 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 875.123575] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ebf555f1a3f749a1819771655a9ee104 [ 875.150216] env[62096]: DEBUG oslo_concurrency.lockutils [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.020s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 875.150757] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-9083a630-3722-46bd-841d-5e4bacb9e547 tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Expecting reply to msg 95fae657721a47f79991f2d41d708416 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 875.152315] env[62096]: DEBUG oslo_concurrency.lockutils [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.568s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 875.153866] env[62096]: INFO nova.compute.claims [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] Claim successful on node 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 875.155590] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 8e2fa33305a24773b12624e98040b5e8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 875.162856] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 95fae657721a47f79991f2d41d708416 [ 875.207179] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8e2fa33305a24773b12624e98040b5e8 [ 875.239296] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Releasing lock "refresh_cache-6cc2a2c1-613d-40bd-a375-424b84b66ac9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 875.239730] env[62096]: DEBUG nova.compute.manager [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 875.240063] env[62096]: DEBUG nova.compute.manager [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 875.240359] env[62096]: DEBUG nova.network.neutron [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 875.256371] env[62096]: DEBUG nova.network.neutron [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 875.256914] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Expecting reply to msg 9c146c4b67aa45bb9947a061b5306ad5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 875.263314] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9c146c4b67aa45bb9947a061b5306ad5 [ 875.508158] env[62096]: DEBUG nova.network.neutron [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 875.530292] env[62096]: DEBUG oslo_vmware.api [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Task: {'id': task-397438, 'name': PowerOnVM_Task, 'duration_secs': 0.396647} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.530535] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Powered on the VM {{(pid=62096) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 875.530728] env[62096]: INFO nova.compute.manager [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Took 4.32 seconds to spawn the instance on the hypervisor. [ 875.530900] env[62096]: DEBUG nova.compute.manager [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Checking state {{(pid=62096) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 875.531640] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-595c217c-9560-4a12-a757-a220dd214a30 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.538792] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Expecting reply to msg 780409b420104a60b967bfb2a93871f3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 875.568835] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 780409b420104a60b967bfb2a93871f3 [ 875.574692] env[62096]: DEBUG nova.network.neutron [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 875.575165] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Expecting reply to msg 736872185e9e4cbb8511054b3e35e101 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 875.582285] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 736872185e9e4cbb8511054b3e35e101 [ 875.617583] env[62096]: INFO nova.compute.manager [-] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Took 1.02 seconds to deallocate network for instance. 
[ 875.621411] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-787cee2b-5948-46d6-9396-dac712351d47 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Expecting reply to msg 2c4d61aed5b7491b8d7c9cbd1f203f85 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 875.646669] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2c4d61aed5b7491b8d7c9cbd1f203f85 [ 875.658669] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 2616964bb142429fa47fbb9afbad1ced in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 875.666311] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2616964bb142429fa47fbb9afbad1ced [ 875.759331] env[62096]: DEBUG nova.network.neutron [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 875.759825] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Expecting reply to msg 0b27800f89af477fbbdf11757becd5f3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 875.767826] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0b27800f89af477fbbdf11757becd5f3 [ 875.896548] env[62096]: DEBUG nova.compute.manager [req-715c69d6-24c2-4e39-987f-b3b6f21168fa req-83536de8-678f-43c8-b94f-f5dd02140a9d service nova] [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] Received event network-vif-deleted-4b8eb4ba-d577-4e7a-8576-502e273b223b {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 876.046559] env[62096]: INFO nova.compute.manager [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Took 24.32 seconds to build instance. [ 876.046856] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Expecting reply to msg 1dd77e2e9a0d46f7b40d8ae8f62eb7da in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 876.059732] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1dd77e2e9a0d46f7b40d8ae8f62eb7da [ 876.078159] env[62096]: DEBUG oslo_concurrency.lockutils [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Releasing lock "refresh_cache-9b85f3a6-892f-4eb4-b0d8-aacaf6576c78" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 876.078562] env[62096]: DEBUG nova.compute.manager [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 876.078752] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 876.079025] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-710c43ad-abb3-4181-b819-c9b51c69d62d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.087757] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7cdf7b9-62ef-4d57-8afc-3a44327c8733 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.109339] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78 could not be found. [ 876.109549] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 876.109802] env[62096]: INFO nova.compute.manager [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] Took 0.03 seconds to destroy the instance on the hypervisor. [ 876.110050] env[62096]: DEBUG oslo.service.loopingcall [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 876.110266] env[62096]: DEBUG nova.compute.manager [-] [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 876.110358] env[62096]: DEBUG nova.network.neutron [-] [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 876.123896] env[62096]: DEBUG oslo_concurrency.lockutils [None req-787cee2b-5948-46d6-9396-dac712351d47 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 876.124663] env[62096]: DEBUG nova.network.neutron [-] [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 876.125090] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg de73d15312314340a1538946db18430f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 876.130876] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg de73d15312314340a1538946db18430f [ 876.262925] env[62096]: INFO nova.compute.manager [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] [instance: 6cc2a2c1-613d-40bd-a375-424b84b66ac9] Took 1.02 seconds to deallocate network for instance. [ 876.264585] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Expecting reply to msg 6f74e1d13b9f43aca1b2f034385c58de in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 876.298390] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6f74e1d13b9f43aca1b2f034385c58de [ 876.356792] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69f8f8fb-cc86-4e5c-90d0-89560c2a0c04 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.365430] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cc4977a-ba2d-466b-8844-87b8130b01c4 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.395446] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43165646-97f3-4a73-b2d2-6e50930ba50d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.402389] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-682361db-5230-4846-aee9-93122d4c5ce0 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.415279] env[62096]: DEBUG nova.compute.provider_tree [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 876.415777] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg a0f89647058847589a81521382ab87aa in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 876.422760] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a0f89647058847589a81521382ab87aa [ 876.548675] env[62096]: DEBUG oslo_concurrency.lockutils [None req-7bd8e5dd-5e58-4625-8471-f91ad349cc84 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Lock "5627f913-29d2-476e-8fde-8ea457cc56f2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 95.722s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
876.549240] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Expecting reply to msg 0347bb39e4094f5889f2affbf0e19a13 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 876.557003] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0347bb39e4094f5889f2affbf0e19a13 [ 876.569488] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Expecting reply to msg 79183f63f74f42a9ad639de6d648762f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 876.578236] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 79183f63f74f42a9ad639de6d648762f [ 876.626956] env[62096]: DEBUG nova.network.neutron [-] [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.627405] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 5844b69dbc494336a9a8ecedc8889a0d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 876.636031] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5844b69dbc494336a9a8ecedc8889a0d [ 876.769522] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Expecting reply to msg b84be102968f4aecaaa030c04f96d331 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 876.799227] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b84be102968f4aecaaa030c04f96d331 [ 876.919296] env[62096]: DEBUG nova.scheduler.client.report [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 876.921726] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg a5059d02852145f0882588ca2b7f2cee in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 876.935337] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a5059d02852145f0882588ca2b7f2cee [ 877.051735] env[62096]: DEBUG nova.compute.manager [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] Starting instance... 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 877.053893] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Expecting reply to msg d6c134e0529d4fa9ac455298f56e87ed in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 877.071115] env[62096]: INFO nova.compute.manager [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Rebuilding instance [ 877.087132] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d6c134e0529d4fa9ac455298f56e87ed [ 877.106484] env[62096]: DEBUG nova.compute.manager [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Checking state {{(pid=62096) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 877.107856] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-765487bc-2cb6-474d-ba96-af82b1c1deba {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.116027] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Expecting reply to msg 86f0b235db6d48caa596b0a39cd24dae in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 877.129780] env[62096]: INFO nova.compute.manager [-] [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] Took 1.02 seconds to deallocate network for instance. 
[ 877.131707] env[62096]: DEBUG nova.compute.claims [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 877.131887] env[62096]: DEBUG oslo_concurrency.lockutils [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 877.143933] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 86f0b235db6d48caa596b0a39cd24dae [ 877.293833] env[62096]: INFO nova.scheduler.client.report [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Deleted allocations for instance 6cc2a2c1-613d-40bd-a375-424b84b66ac9 [ 877.298807] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Expecting reply to msg 4fd1c3bf01cb4440a0fbb1dc1d7450b6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 877.309805] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4fd1c3bf01cb4440a0fbb1dc1d7450b6 [ 877.424483] env[62096]: DEBUG oslo_concurrency.lockutils [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.272s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 877.425019] env[62096]: DEBUG nova.compute.manager [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 877.426632] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 76e13b7973cd4bc6a443b0b57c736641 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 877.428030] env[62096]: DEBUG oslo_concurrency.lockutils [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.971s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 877.429399] env[62096]: INFO nova.compute.claims [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 60bc5e14-f495-48da-8db0-54d75b523822] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 877.430845] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 503c022ce8184fd5beb93566de85f2e3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 877.456532] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 76e13b7973cd4bc6a443b0b57c736641 [ 877.464662] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 503c022ce8184fd5beb93566de85f2e3 [ 877.570157] env[62096]: DEBUG oslo_concurrency.lockutils [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 877.619330] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Powering off the VM {{(pid=62096) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 877.619620] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-55875d95-89bd-40e9-a714-83bdb63c8ab7 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.626888] env[62096]: DEBUG oslo_vmware.api [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Waiting for the task: (returnval){ [ 877.626888] env[62096]: value = "task-397439" [ 877.626888] env[62096]: _type = "Task" [ 877.626888] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.634696] env[62096]: DEBUG oslo_vmware.api [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Task: {'id': task-397439, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.801256] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5ad96e0f-1e23-473c-97f9-ade218cb855a tempest-InstanceActionsTestJSON-629359483 tempest-InstanceActionsTestJSON-629359483-project-member] Lock "6cc2a2c1-613d-40bd-a375-424b84b66ac9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 136.130s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 877.802230] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg 6571903ae0004c11b2388ecddd548b84 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 877.814438] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6571903ae0004c11b2388ecddd548b84 [ 877.933679] env[62096]: DEBUG nova.compute.utils [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 877.934568] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg ac860a11dbe3479e8eb7fa1da63f8539 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 877.935651] env[62096]: DEBUG nova.compute.manager [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 877.935938] env[62096]: DEBUG nova.network.neutron [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 877.939256] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg daca60ef4c0c471cba478b69e00c8c39 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 877.946549] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac860a11dbe3479e8eb7fa1da63f8539 [ 877.947176] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg daca60ef4c0c471cba478b69e00c8c39 [ 877.984638] env[62096]: DEBUG nova.policy [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6d00589b1aa24dd7beb7c3ac5cb2a8ca', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bffafc9dc83d477d823cd7364968f48a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 878.136387] env[62096]: DEBUG oslo_vmware.api [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Task: {'id': task-397439, 'name': PowerOffVM_Task, 'duration_secs': 0.191318} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.136673] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Powered off the VM {{(pid=62096) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 878.136892] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 878.137636] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af78521c-a2b7-439c-8211-88aded17d824 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.143932] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Unregistering the VM {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 878.144195] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-997946f5-86e6-478c-a856-2f526b4a5238 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.166585] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Unregistered the VM {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 878.166875] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Deleting contents of the VM from datastore datastore2 {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 878.167059] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Deleting the datastore file [datastore2] 5627f913-29d2-476e-8fde-8ea457cc56f2 {{(pid=62096) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 878.167331] env[62096]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c9614432-fe95-4aea-bde7-c14f8ad7c1d8 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.172866] env[62096]: DEBUG oslo_vmware.api [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Waiting for the task: (returnval){ [ 878.172866] env[62096]: value = "task-397441" [ 878.172866] env[62096]: _type = "Task" [ 878.172866] env[62096]: } to complete. 
{{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.180599] env[62096]: DEBUG oslo_vmware.api [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Task: {'id': task-397441, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.257496] env[62096]: DEBUG nova.network.neutron [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] Successfully created port: 92e9c134-3671-42c7-b549-6f5262e08e26 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 878.305252] env[62096]: DEBUG nova.compute.manager [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 878.307228] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg f02d33c659274a86beab2cc3c2801b17 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 878.343546] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f02d33c659274a86beab2cc3c2801b17 [ 878.441163] env[62096]: DEBUG nova.compute.manager [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 878.442997] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 39cf26d26c6a45edad15ce4232147c83 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 878.482221] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 39cf26d26c6a45edad15ce4232147c83 [ 878.656653] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f193937-ae7a-440d-9f54-91c5c7021a17 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.664751] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a3ce72f-ecad-47fe-85ed-cc3ed99aa342 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.698780] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d3c8051-7d62-434f-afb2-bc91aa19e0a2 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.709751] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cf36353-4c9d-47ed-8fac-c258c413685b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.714619] env[62096]: DEBUG oslo_vmware.api [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Task: {'id': task-397441, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.094474} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.714921] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Deleted the datastore file {{(pid=62096) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 878.715335] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Deleted contents of the VM from datastore datastore2 {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 878.715584] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 878.717567] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Expecting reply to msg 556ba85d97444ca5bad5e1853a53bf54 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 878.730175] env[62096]: DEBUG nova.compute.provider_tree [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 878.730891] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 0f3e14ad56734292be84e7c51e4b97e9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 878.742380] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0f3e14ad56734292be84e7c51e4b97e9 [ 878.758961] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 556ba85d97444ca5bad5e1853a53bf54 [ 878.829293] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 878.951453] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 012058ed72cb490791edbc0864007504 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 878.967200] env[62096]: DEBUG nova.compute.manager [req-ad78065e-bb5c-4f1f-9023-7df0630f6ec2 req-52bcad7a-2aec-4abc-98f2-31b99e8d4fbb service nova] [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] Received event network-changed-92e9c134-3671-42c7-b549-6f5262e08e26 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 878.967385] env[62096]: DEBUG nova.compute.manager [req-ad78065e-bb5c-4f1f-9023-7df0630f6ec2 
req-52bcad7a-2aec-4abc-98f2-31b99e8d4fbb service nova] [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] Refreshing instance network info cache due to event network-changed-92e9c134-3671-42c7-b549-6f5262e08e26. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 878.967588] env[62096]: DEBUG oslo_concurrency.lockutils [req-ad78065e-bb5c-4f1f-9023-7df0630f6ec2 req-52bcad7a-2aec-4abc-98f2-31b99e8d4fbb service nova] Acquiring lock "refresh_cache-9aae3fde-f55a-4e90-a9b5-4594051183f4" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 878.967721] env[62096]: DEBUG oslo_concurrency.lockutils [req-ad78065e-bb5c-4f1f-9023-7df0630f6ec2 req-52bcad7a-2aec-4abc-98f2-31b99e8d4fbb service nova] Acquired lock "refresh_cache-9aae3fde-f55a-4e90-a9b5-4594051183f4" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.967871] env[62096]: DEBUG nova.network.neutron [req-ad78065e-bb5c-4f1f-9023-7df0630f6ec2 req-52bcad7a-2aec-4abc-98f2-31b99e8d4fbb service nova] [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] Refreshing network info cache for port 92e9c134-3671-42c7-b549-6f5262e08e26 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 878.968308] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-ad78065e-bb5c-4f1f-9023-7df0630f6ec2 req-52bcad7a-2aec-4abc-98f2-31b99e8d4fbb service nova] Expecting reply to msg 7f1e43c3868b42b5a4798d8cc4c70b83 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 878.978338] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7f1e43c3868b42b5a4798d8cc4c70b83 [ 878.990096] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 012058ed72cb490791edbc0864007504 [ 879.092402] env[62096]: ERROR nova.compute.manager [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 92e9c134-3671-42c7-b549-6f5262e08e26, please check neutron logs for more information. 
[ 879.092402] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 879.092402] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 879.092402] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 879.092402] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 879.092402] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 879.092402] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 879.092402] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 879.092402] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 879.092402] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 879.092402] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 879.092402] env[62096]: ERROR nova.compute.manager raise self.value [ 879.092402] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 879.092402] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 879.092402] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 879.092402] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 879.092912] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 879.092912] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 879.092912] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 92e9c134-3671-42c7-b549-6f5262e08e26, please check neutron logs for more information. 
[ 879.092912] env[62096]: ERROR nova.compute.manager [ 879.092912] env[62096]: Traceback (most recent call last): [ 879.092912] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 879.092912] env[62096]: listener.cb(fileno) [ 879.092912] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 879.092912] env[62096]: result = function(*args, **kwargs) [ 879.092912] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 879.092912] env[62096]: return func(*args, **kwargs) [ 879.092912] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 879.092912] env[62096]: raise e [ 879.092912] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 879.092912] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 879.092912] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 879.092912] env[62096]: created_port_ids = self._update_ports_for_instance( [ 879.092912] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 879.092912] env[62096]: with excutils.save_and_reraise_exception(): [ 879.092912] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 879.092912] env[62096]: self.force_reraise() [ 879.092912] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 879.092912] env[62096]: raise self.value [ 879.092912] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 879.092912] env[62096]: updated_port = self._update_port( [ 879.092912] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 879.092912] env[62096]: _ensure_no_port_binding_failure(port) [ 879.092912] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 879.092912] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 879.093761] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 92e9c134-3671-42c7-b549-6f5262e08e26, please check neutron logs for more information. 
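Both tracebacks above bottom out in _ensure_no_port_binding_failure(port) raising nova.exception.PortBindingFailed for port 92e9c134-3671-42c7-b549-6f5262e08e26. The snippet below is a minimal, self-contained sketch of that kind of check, assuming the port's 'binding:vif_type' field is what signals a failed binding; the exception class and helper here are simplified stand-ins for illustration, not the Nova source.

# Sketch only: mirrors the shape of the check the traceback points at
# (nova/network/neutron.py, _ensure_no_port_binding_failure). The
# 'binding:vif_type' == 'binding_failed' condition is an assumption.

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")
        self.port_id = port_id


def ensure_no_port_binding_failure(port):
    """Raise if the Neutron port dict reports a failed binding."""
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


# Example: a port dict as Neutron might return it after binding fails,
# using the port ID seen in the log above.
port = {'id': '92e9c134-3671-42c7-b549-6f5262e08e26',
        'binding:vif_type': 'binding_failed'}
try:
    ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)  # Binding failed for port 92e9c134-..., please check neutron logs ...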
[ 879.093761] env[62096]: Removing descriptor: 14 [ 879.222308] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Expecting reply to msg 4b64da31722048a5baa05ba87a7ccb9f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 879.233773] env[62096]: DEBUG nova.scheduler.client.report [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 879.236183] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 56f3ffa07f3a4ef0bb8e320fa6e77119 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 879.251519] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 56f3ffa07f3a4ef0bb8e320fa6e77119 [ 879.253543] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4b64da31722048a5baa05ba87a7ccb9f [ 879.455577] env[62096]: DEBUG nova.compute.manager [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 879.485045] env[62096]: DEBUG nova.virt.hardware [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 879.485369] env[62096]: DEBUG nova.virt.hardware [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 879.485548] env[62096]: DEBUG nova.virt.hardware [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 879.485725] env[62096]: DEBUG nova.virt.hardware [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 879.485864] env[62096]: DEBUG nova.virt.hardware [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 879.486006] env[62096]: DEBUG nova.virt.hardware [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 879.486206] env[62096]: DEBUG nova.virt.hardware [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 879.486355] env[62096]: DEBUG nova.virt.hardware [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 879.486511] env[62096]: DEBUG 
nova.virt.hardware [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 879.486660] env[62096]: DEBUG nova.virt.hardware [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 879.486830] env[62096]: DEBUG nova.virt.hardware [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 879.487981] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2ba938f-8dff-4544-aac0-3a6eed3eff4b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.491288] env[62096]: DEBUG nova.network.neutron [req-ad78065e-bb5c-4f1f-9023-7df0630f6ec2 req-52bcad7a-2aec-4abc-98f2-31b99e8d4fbb service nova] [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 879.497937] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3efcfc8-a86c-4cc4-b715-780c6002cc8f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.518956] env[62096]: ERROR nova.compute.manager [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 92e9c134-3671-42c7-b549-6f5262e08e26, please check neutron logs for more information. 
[ 879.518956] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] Traceback (most recent call last): [ 879.518956] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 879.518956] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] yield resources [ 879.518956] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 879.518956] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] self.driver.spawn(context, instance, image_meta, [ 879.518956] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 879.518956] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 879.518956] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 879.518956] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] vm_ref = self.build_virtual_machine(instance, [ 879.518956] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 879.519320] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] vif_infos = vmwarevif.get_vif_info(self._session, [ 879.519320] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 879.519320] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] for vif in network_info: [ 879.519320] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 879.519320] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] return self._sync_wrapper(fn, *args, **kwargs) [ 879.519320] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 879.519320] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] self.wait() [ 879.519320] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 879.519320] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] self[:] = self._gt.wait() [ 879.519320] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 879.519320] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] return self._exit_event.wait() [ 879.519320] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 879.519320] env[62096]: ERROR 
nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] current.throw(*self._exc) [ 879.519757] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 879.519757] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] result = function(*args, **kwargs) [ 879.519757] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 879.519757] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] return func(*args, **kwargs) [ 879.519757] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 879.519757] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] raise e [ 879.519757] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 879.519757] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] nwinfo = self.network_api.allocate_for_instance( [ 879.519757] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 879.519757] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] created_port_ids = self._update_ports_for_instance( [ 879.519757] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 879.519757] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] with excutils.save_and_reraise_exception(): [ 879.519757] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 879.520151] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] self.force_reraise() [ 879.520151] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 879.520151] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] raise self.value [ 879.520151] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 879.520151] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] updated_port = self._update_port( [ 879.520151] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 879.520151] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] _ensure_no_port_binding_failure(port) [ 879.520151] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
879.520151] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] raise exception.PortBindingFailed(port_id=port['id']) [ 879.520151] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] nova.exception.PortBindingFailed: Binding failed for port 92e9c134-3671-42c7-b549-6f5262e08e26, please check neutron logs for more information. [ 879.520151] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] [ 879.520151] env[62096]: INFO nova.compute.manager [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] Terminating instance [ 879.520506] env[62096]: DEBUG oslo_concurrency.lockutils [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Acquiring lock "refresh_cache-9aae3fde-f55a-4e90-a9b5-4594051183f4" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 879.581820] env[62096]: DEBUG nova.network.neutron [req-ad78065e-bb5c-4f1f-9023-7df0630f6ec2 req-52bcad7a-2aec-4abc-98f2-31b99e8d4fbb service nova] [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 879.582394] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-ad78065e-bb5c-4f1f-9023-7df0630f6ec2 req-52bcad7a-2aec-4abc-98f2-31b99e8d4fbb service nova] Expecting reply to msg 2ed19effbc124c9db0ed79f151f5daac in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 879.590920] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2ed19effbc124c9db0ed79f151f5daac [ 879.744140] env[62096]: DEBUG oslo_concurrency.lockutils [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.313s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 879.744140] env[62096]: DEBUG nova.compute.manager [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 60bc5e14-f495-48da-8db0-54d75b523822] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 879.744140] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 60ab2ed2981d49edbaf04c8671544e3c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 879.745003] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.995s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.745913] env[62096]: DEBUG nova.objects.instance [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Lazy-loading 'resources' on Instance uuid 250ef7e7-266b-451d-8627-9cce211d4e83 {{(pid=62096) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 879.746393] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Expecting reply to msg 8529c2abbcc94c9fa0dba75b6e379b65 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 879.757638] env[62096]: DEBUG nova.virt.hardware [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 879.757883] env[62096]: DEBUG nova.virt.hardware [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 879.758043] env[62096]: DEBUG nova.virt.hardware [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 879.758237] env[62096]: DEBUG nova.virt.hardware [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 879.758386] env[62096]: DEBUG nova.virt.hardware [None req-a49958a4-6510-4969-b280-c49ebf520e10 
tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 879.758534] env[62096]: DEBUG nova.virt.hardware [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 879.758737] env[62096]: DEBUG nova.virt.hardware [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 879.758892] env[62096]: DEBUG nova.virt.hardware [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 879.759055] env[62096]: DEBUG nova.virt.hardware [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 879.759215] env[62096]: DEBUG nova.virt.hardware [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 879.759383] env[62096]: DEBUG nova.virt.hardware [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 879.760148] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8529c2abbcc94c9fa0dba75b6e379b65 [ 879.761393] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a2a8173-5144-460a-b17c-2013a52de326 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.770929] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a941cfc-c45c-4e48-a9f6-d053d1c9643b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.791184] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 60ab2ed2981d49edbaf04c8671544e3c [ 879.792350] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Instance VIF info [] {{(pid=62096) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 879.799437] env[62096]: DEBUG oslo.service.loopingcall [None 
req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 879.799915] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Creating VM on the ESX host {{(pid=62096) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 879.800161] env[62096]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5ffdc34c-1196-40fc-90d9-0135980974ee {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.817471] env[62096]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 879.817471] env[62096]: value = "task-397442" [ 879.817471] env[62096]: _type = "Task" [ 879.817471] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.824830] env[62096]: DEBUG oslo_vmware.api [-] Task: {'id': task-397442, 'name': CreateVM_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.084304] env[62096]: DEBUG oslo_concurrency.lockutils [req-ad78065e-bb5c-4f1f-9023-7df0630f6ec2 req-52bcad7a-2aec-4abc-98f2-31b99e8d4fbb service nova] Releasing lock "refresh_cache-9aae3fde-f55a-4e90-a9b5-4594051183f4" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 880.084710] env[62096]: DEBUG oslo_concurrency.lockutils [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Acquired lock "refresh_cache-9aae3fde-f55a-4e90-a9b5-4594051183f4" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 880.084882] env[62096]: DEBUG nova.network.neutron [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 880.085349] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg fa4fa24f485c4655b84d292539ba1f7c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 880.092189] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fa4fa24f485c4655b84d292539ba1f7c [ 880.257373] env[62096]: DEBUG nova.compute.utils [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 880.258026] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg a45fc232939c4be1afeb5239ec4bcb7a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 880.266520] env[62096]: DEBUG nova.compute.manager [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 
tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 60bc5e14-f495-48da-8db0-54d75b523822] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 880.266702] env[62096]: DEBUG nova.network.neutron [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 60bc5e14-f495-48da-8db0-54d75b523822] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 880.277229] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a45fc232939c4be1afeb5239ec4bcb7a [ 880.332233] env[62096]: DEBUG oslo_vmware.api [-] Task: {'id': task-397442, 'name': CreateVM_Task, 'duration_secs': 0.234962} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.332342] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Created VM on the ESX host {{(pid=62096) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 880.332688] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 880.333336] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Acquired lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 880.333682] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 880.333930] env[62096]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4eea5ac6-a318-4eb4-a38d-3385b371870f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.336864] env[62096]: DEBUG nova.policy [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eccf4b6b1d2b496796ad12d6caad16ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '53edc9a849714cedab5fcd7b03ca6916', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 880.343687] env[62096]: DEBUG oslo_vmware.api [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 
tempest-ServersAdmin275Test-1216799344-project-member] Waiting for the task: (returnval){ [ 880.343687] env[62096]: value = "session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]52ebdb18-fefa-5721-9c0a-a5e0ce13eef4" [ 880.343687] env[62096]: _type = "Task" [ 880.343687] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.354455] env[62096]: DEBUG oslo_vmware.api [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]52ebdb18-fefa-5721-9c0a-a5e0ce13eef4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.470541] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c4c882c-047d-4379-9076-4ea0cd8f6df6 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.480267] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8ccf0e1-4c2a-40a3-8b92-f432d2d5ff98 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.515407] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c575051-1359-4386-84aa-016792cb4699 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.522852] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95d59914-fc6c-4541-9293-6d0e25632d1e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.538180] env[62096]: DEBUG nova.compute.provider_tree [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 880.538941] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Expecting reply to msg 6c3d86ef8d14464a9b248deb7294754e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 880.546133] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6c3d86ef8d14464a9b248deb7294754e [ 880.608617] env[62096]: DEBUG nova.network.neutron [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 880.614710] env[62096]: DEBUG nova.network.neutron [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 60bc5e14-f495-48da-8db0-54d75b523822] Successfully created port: 7e36f48d-97d8-4355-924b-977fd775b8b0 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 880.733008] env[62096]: DEBUG nova.network.neutron [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.733949] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg e174a942146d4345a544320e8a099391 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 880.742223] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e174a942146d4345a544320e8a099391 [ 880.767030] env[62096]: DEBUG nova.compute.manager [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 60bc5e14-f495-48da-8db0-54d75b523822] Start building block device mappings for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 880.768726] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 102ab9ff910f419ab016ace8eb821181 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 880.803399] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 102ab9ff910f419ab016ace8eb821181 [ 880.854757] env[62096]: DEBUG oslo_vmware.api [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]52ebdb18-fefa-5721-9c0a-a5e0ce13eef4, 'name': SearchDatastore_Task, 'duration_secs': 0.008463} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.855072] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Releasing lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 880.855319] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Processing image fb764baa-3805-45c4-a694-aa91b0932110 {{(pid=62096) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 880.855519] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 880.855667] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Acquired lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 880.855839] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62096) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 880.856122] env[62096]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-526552c3-b1fc-4de3-8ad5-21c5d9cd3b26 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.863797] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62096) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 880.863931] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62096) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 880.864669] env[62096]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb22d56a-beea-4b57-82b7-536d5b3a5945 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.871082] env[62096]: DEBUG oslo_vmware.api [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Waiting for the task: (returnval){ [ 880.871082] env[62096]: value = "session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]5223b584-e748-97e2-f93b-840ce36242aa" [ 880.871082] env[62096]: _type = "Task" [ 880.871082] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.880558] env[62096]: DEBUG oslo_vmware.api [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]5223b584-e748-97e2-f93b-840ce36242aa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.038539] env[62096]: DEBUG nova.compute.manager [req-9de3bde4-a4ad-42ee-8cf0-3c95d96f14f7 req-8ff536d4-1a2d-45ab-b138-ceb1e98227ba service nova] [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] Received event network-vif-deleted-92e9c134-3671-42c7-b549-6f5262e08e26 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 881.041752] env[62096]: DEBUG nova.scheduler.client.report [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 881.044176] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Expecting reply to msg cf004df652824af7bdd172d1f26ddb7a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 881.058920] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cf004df652824af7bdd172d1f26ddb7a [ 881.242735] env[62096]: DEBUG oslo_concurrency.lockutils [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Releasing lock "refresh_cache-9aae3fde-f55a-4e90-a9b5-4594051183f4" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 881.243230] env[62096]: DEBUG nova.compute.manager [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 881.243336] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 881.243730] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d6e35e11-e040-4ca1-9493-5ba7ff32b521 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.253242] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab76e9cd-465c-410d-858c-7f1d816bdf22 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.274436] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg afe87e746e4b4292bd7bea158b71d585 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 881.279713] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9aae3fde-f55a-4e90-a9b5-4594051183f4 could not be found. [ 881.279926] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 881.280216] env[62096]: INFO nova.compute.manager [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] Took 0.04 seconds to destroy the instance on the hypervisor. [ 881.280367] env[62096]: DEBUG oslo.service.loopingcall [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 881.280823] env[62096]: DEBUG nova.compute.manager [-] [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 881.280968] env[62096]: DEBUG nova.network.neutron [-] [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 881.320927] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg afe87e746e4b4292bd7bea158b71d585 [ 881.332420] env[62096]: DEBUG nova.network.neutron [-] [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 881.332949] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 6037e6936401437cba6c96ca32241e40 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 881.340225] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6037e6936401437cba6c96ca32241e40 [ 881.382013] env[62096]: DEBUG oslo_vmware.api [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]5223b584-e748-97e2-f93b-840ce36242aa, 'name': SearchDatastore_Task, 'duration_secs': 0.00741} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.382779] env[62096]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e729c54e-1b34-41da-a77a-1f03a77a8a9f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.393802] env[62096]: DEBUG oslo_vmware.api [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Waiting for the task: (returnval){ [ 881.393802] env[62096]: value = "session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]5205ea39-a76e-32c7-3387-55bb7a8b5908" [ 881.393802] env[62096]: _type = "Task" [ 881.393802] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.394925] env[62096]: DEBUG oslo_vmware.api [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]5205ea39-a76e-32c7-3387-55bb7a8b5908, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.473053] env[62096]: ERROR nova.compute.manager [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7e36f48d-97d8-4355-924b-977fd775b8b0, please check neutron logs for more information. 
[ 881.473053] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 881.473053] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 881.473053] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 881.473053] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 881.473053] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 881.473053] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 881.473053] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 881.473053] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 881.473053] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 881.473053] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 881.473053] env[62096]: ERROR nova.compute.manager raise self.value [ 881.473053] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 881.473053] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 881.473053] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 881.473053] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 881.473501] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 881.473501] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 881.473501] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7e36f48d-97d8-4355-924b-977fd775b8b0, please check neutron logs for more information. 
[ 881.473501] env[62096]: ERROR nova.compute.manager [ 881.473501] env[62096]: Traceback (most recent call last): [ 881.473501] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 881.473501] env[62096]: listener.cb(fileno) [ 881.473501] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 881.473501] env[62096]: result = function(*args, **kwargs) [ 881.473501] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 881.473501] env[62096]: return func(*args, **kwargs) [ 881.473501] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 881.473501] env[62096]: raise e [ 881.473501] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 881.473501] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 881.473501] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 881.473501] env[62096]: created_port_ids = self._update_ports_for_instance( [ 881.473501] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 881.473501] env[62096]: with excutils.save_and_reraise_exception(): [ 881.473501] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 881.473501] env[62096]: self.force_reraise() [ 881.473501] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 881.473501] env[62096]: raise self.value [ 881.473501] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 881.473501] env[62096]: updated_port = self._update_port( [ 881.473501] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 881.473501] env[62096]: _ensure_no_port_binding_failure(port) [ 881.473501] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 881.473501] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 881.474201] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 7e36f48d-97d8-4355-924b-977fd775b8b0, please check neutron logs for more information. 
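Both copies of the traceback above bottom out in nova/network/neutron.py:294, where _ensure_no_port_binding_failure() raises PortBindingFailed for port 7e36f48d-97d8-4355-924b-977fd775b8b0. The following is a minimal, self-contained sketch of that check, not the Nova source itself: only the raise site and the exception message are taken from the log, and the 'binding:vif_type' comparison is an assumption about how Neutron marks a failed binding.

# Simplified sketch of the check the traceback above lands in
# (nova/network/neutron.py::_ensure_no_port_binding_failure).
# The 'binding:vif_type' comparison is an assumption; only the raise
# site and message text are confirmed by the log lines above.

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")
        self.port_id = port_id


def _ensure_no_port_binding_failure(port):
    """Raise if Neutron reported a failed binding for this port dict."""
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])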
[ 881.474201] env[62096]: Removing descriptor: 14 [ 881.555045] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.810s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 881.557625] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.612s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 881.559205] env[62096]: INFO nova.compute.claims [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 881.560931] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Expecting reply to msg c18f66ec70d54697ade63c655d692609 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 881.580507] env[62096]: INFO nova.scheduler.client.report [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Deleted allocations for instance 250ef7e7-266b-451d-8627-9cce211d4e83 [ 881.583410] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Expecting reply to msg c06846df17234a919beab322a1ff9f29 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 881.606784] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c18f66ec70d54697ade63c655d692609 [ 881.659436] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c06846df17234a919beab322a1ff9f29 [ 881.782714] env[62096]: DEBUG nova.compute.manager [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 60bc5e14-f495-48da-8db0-54d75b523822] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 881.807661] env[62096]: DEBUG nova.virt.hardware [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 881.807898] env[62096]: DEBUG nova.virt.hardware [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 881.808064] env[62096]: DEBUG nova.virt.hardware [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 881.808254] env[62096]: DEBUG nova.virt.hardware [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 881.808395] env[62096]: DEBUG nova.virt.hardware [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 881.808536] env[62096]: DEBUG nova.virt.hardware [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 881.808739] env[62096]: DEBUG nova.virt.hardware [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 881.808893] env[62096]: DEBUG nova.virt.hardware [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 881.809060] env[62096]: DEBUG nova.virt.hardware [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Got 1 possible 
topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 881.809272] env[62096]: DEBUG nova.virt.hardware [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 881.809745] env[62096]: DEBUG nova.virt.hardware [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 881.810588] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b00e664a-eca3-49b0-887c-08abeb59e038 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.818671] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5af64c7b-0d37-4599-94d7-4d62f3f68f88 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.833190] env[62096]: ERROR nova.compute.manager [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 60bc5e14-f495-48da-8db0-54d75b523822] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7e36f48d-97d8-4355-924b-977fd775b8b0, please check neutron logs for more information. [ 881.833190] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] Traceback (most recent call last): [ 881.833190] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 881.833190] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] yield resources [ 881.833190] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 881.833190] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] self.driver.spawn(context, instance, image_meta, [ 881.833190] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 881.833190] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] self._vmops.spawn(context, instance, image_meta, injected_files, [ 881.833190] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 881.833190] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] vm_ref = self.build_virtual_machine(instance, [ 881.833190] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 881.833515] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] vif_infos = vmwarevif.get_vif_info(self._session, [ 881.833515] env[62096]: ERROR nova.compute.manager 
[instance: 60bc5e14-f495-48da-8db0-54d75b523822] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 881.833515] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] for vif in network_info: [ 881.833515] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 881.833515] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] return self._sync_wrapper(fn, *args, **kwargs) [ 881.833515] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 881.833515] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] self.wait() [ 881.833515] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 881.833515] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] self[:] = self._gt.wait() [ 881.833515] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 881.833515] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] return self._exit_event.wait() [ 881.833515] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 881.833515] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] current.throw(*self._exc) [ 881.833829] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 881.833829] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] result = function(*args, **kwargs) [ 881.833829] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 881.833829] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] return func(*args, **kwargs) [ 881.833829] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 881.833829] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] raise e [ 881.833829] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 881.833829] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] nwinfo = self.network_api.allocate_for_instance( [ 881.833829] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 881.833829] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] created_port_ids = self._update_ports_for_instance( [ 881.833829] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] File 
"/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 881.833829] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] with excutils.save_and_reraise_exception(): [ 881.833829] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 881.834221] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] self.force_reraise() [ 881.834221] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 881.834221] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] raise self.value [ 881.834221] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 881.834221] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] updated_port = self._update_port( [ 881.834221] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 881.834221] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] _ensure_no_port_binding_failure(port) [ 881.834221] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 881.834221] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] raise exception.PortBindingFailed(port_id=port['id']) [ 881.834221] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] nova.exception.PortBindingFailed: Binding failed for port 7e36f48d-97d8-4355-924b-977fd775b8b0, please check neutron logs for more information. 
[ 881.834221] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] [ 881.834221] env[62096]: INFO nova.compute.manager [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 60bc5e14-f495-48da-8db0-54d75b523822] Terminating instance [ 881.835050] env[62096]: DEBUG nova.network.neutron [-] [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.835439] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 2ca811942b434e38ae2fb42b4640e4b6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 881.836365] env[62096]: DEBUG oslo_concurrency.lockutils [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Acquiring lock "refresh_cache-60bc5e14-f495-48da-8db0-54d75b523822" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 881.836523] env[62096]: DEBUG oslo_concurrency.lockutils [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Acquired lock "refresh_cache-60bc5e14-f495-48da-8db0-54d75b523822" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.836684] env[62096]: DEBUG nova.network.neutron [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 60bc5e14-f495-48da-8db0-54d75b523822] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 881.837032] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg b8c02b7c33764690a1cde4598b9889d2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 881.843285] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2ca811942b434e38ae2fb42b4640e4b6 [ 881.845732] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b8c02b7c33764690a1cde4598b9889d2 [ 881.900257] env[62096]: DEBUG oslo_vmware.api [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]5205ea39-a76e-32c7-3387-55bb7a8b5908, 'name': SearchDatastore_Task, 'duration_secs': 0.008339} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.900499] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Releasing lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 881.900744] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk to [datastore2] 5627f913-29d2-476e-8fde-8ea457cc56f2/5627f913-29d2-476e-8fde-8ea457cc56f2.vmdk {{(pid=62096) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 881.900999] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b2978029-62bb-4f85-bb92-37dbaa81339d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.907479] env[62096]: DEBUG oslo_vmware.api [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Waiting for the task: (returnval){ [ 881.907479] env[62096]: value = "task-397443" [ 881.907479] env[62096]: _type = "Task" [ 881.907479] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.915285] env[62096]: DEBUG oslo_vmware.api [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Task: {'id': task-397443, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.064523] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Expecting reply to msg fa6ddcf50d844dfcbddc0842d1105ca0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 882.074811] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fa6ddcf50d844dfcbddc0842d1105ca0 [ 882.094965] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Lock "250ef7e7-266b-451d-8627-9cce211d4e83" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.056s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 882.095517] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cc6c4fa9-fc85-41c0-a8a8-237fec5b300e tempest-ServerShowV257Test-2016593476 tempest-ServerShowV257Test-2016593476-project-member] Expecting reply to msg 6ac64725f9d34cfc8ac591a22b336421 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 882.109607] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6ac64725f9d34cfc8ac591a22b336421 [ 882.338803] env[62096]: INFO nova.compute.manager [-] [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] Took 1.06 seconds to deallocate network for instance. [ 882.342642] env[62096]: DEBUG nova.compute.claims [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 882.342804] env[62096]: DEBUG oslo_concurrency.lockutils [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.356031] env[62096]: DEBUG nova.network.neutron [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 60bc5e14-f495-48da-8db0-54d75b523822] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 882.418008] env[62096]: DEBUG oslo_vmware.api [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Task: {'id': task-397443, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.426259} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.418985] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk to [datastore2] 5627f913-29d2-476e-8fde-8ea457cc56f2/5627f913-29d2-476e-8fde-8ea457cc56f2.vmdk {{(pid=62096) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 882.418985] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Extending root virtual disk to 1048576 {{(pid=62096) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 882.418985] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-03495a6e-ee62-430b-9d5d-414c9f0e40dc {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.425710] env[62096]: DEBUG oslo_vmware.api [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Waiting for the task: (returnval){ [ 882.425710] env[62096]: value = "task-397444" [ 882.425710] env[62096]: _type = "Task" [ 882.425710] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.433615] env[62096]: DEBUG oslo_vmware.api [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Task: {'id': task-397444, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.440453] env[62096]: DEBUG nova.network.neutron [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 60bc5e14-f495-48da-8db0-54d75b523822] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 882.441027] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg d0d305c47798479c9023a81846ccb273 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 882.449090] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d0d305c47798479c9023a81846ccb273 [ 882.736740] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d5b7061-d2f5-4af9-acd6-cd33d760d4df {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.744021] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92e24928-c939-49f4-ac75-23404db9e4db {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.782725] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df80b88f-2e81-432e-a570-fe9f3d63c989 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.789928] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-304fd6c3-8d4f-4c5d-8bed-aa8883581bef {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.804939] env[62096]: DEBUG nova.compute.provider_tree [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 882.805426] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Expecting reply to msg f85c2e0ccf824cffa12fa0576205f494 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 882.811814] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f85c2e0ccf824cffa12fa0576205f494 [ 882.935434] env[62096]: DEBUG oslo_vmware.api [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Task: {'id': task-397444, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.058143} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.935803] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Extended root virtual disk {{(pid=62096) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 882.936654] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7db7b4e4-5c1a-42d9-bc42-dc00e3a23da4 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.947734] env[62096]: DEBUG oslo_concurrency.lockutils [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Releasing lock "refresh_cache-60bc5e14-f495-48da-8db0-54d75b523822" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 882.948187] env[62096]: DEBUG nova.compute.manager [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 60bc5e14-f495-48da-8db0-54d75b523822] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 882.948447] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 60bc5e14-f495-48da-8db0-54d75b523822] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 882.957128] env[62096]: DEBUG nova.virt.vmwareapi.volumeops [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Reconfiguring VM instance instance-00000048 to attach disk [datastore2] 5627f913-29d2-476e-8fde-8ea457cc56f2/5627f913-29d2-476e-8fde-8ea457cc56f2.vmdk or device None with type sparse {{(pid=62096) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 882.957420] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0bbe5548-3acb-4e3b-91c6-42b70a0af4d2 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.959227] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cd66effe-833c-495a-9297-0ac3f35b31d3 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.979461] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ca0501c-771d-4de9-a1e5-2d5187d6c847 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.991561] env[62096]: DEBUG oslo_vmware.api [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Waiting for the task: (returnval){ [ 882.991561] env[62096]: value = "task-397445" [ 882.991561] env[62096]: _type = "Task" [ 882.991561] env[62096]: } to complete. 
{{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.000752] env[62096]: DEBUG oslo_vmware.api [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Task: {'id': task-397445, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.005675] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 60bc5e14-f495-48da-8db0-54d75b523822] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 60bc5e14-f495-48da-8db0-54d75b523822 could not be found. [ 883.006032] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 60bc5e14-f495-48da-8db0-54d75b523822] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 883.006329] env[62096]: INFO nova.compute.manager [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 60bc5e14-f495-48da-8db0-54d75b523822] Took 0.06 seconds to destroy the instance on the hypervisor. [ 883.006671] env[62096]: DEBUG oslo.service.loopingcall [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 883.006984] env[62096]: DEBUG nova.compute.manager [-] [instance: 60bc5e14-f495-48da-8db0-54d75b523822] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 883.007173] env[62096]: DEBUG nova.network.neutron [-] [instance: 60bc5e14-f495-48da-8db0-54d75b523822] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 883.021741] env[62096]: DEBUG nova.network.neutron [-] [instance: 60bc5e14-f495-48da-8db0-54d75b523822] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 883.022309] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 367ec14ff67d4c6ebc1eb17b5191a28a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 883.029261] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 367ec14ff67d4c6ebc1eb17b5191a28a [ 883.066791] env[62096]: DEBUG nova.compute.manager [req-489e0b79-0d22-4401-9d3b-27870eb9a078 req-8659e8ad-6461-4b19-990a-2a834eda76f4 service nova] [instance: 60bc5e14-f495-48da-8db0-54d75b523822] Received event network-changed-7e36f48d-97d8-4355-924b-977fd775b8b0 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 883.066791] env[62096]: DEBUG nova.compute.manager [req-489e0b79-0d22-4401-9d3b-27870eb9a078 req-8659e8ad-6461-4b19-990a-2a834eda76f4 service nova] [instance: 60bc5e14-f495-48da-8db0-54d75b523822] Refreshing instance network info cache due to event network-changed-7e36f48d-97d8-4355-924b-977fd775b8b0. 
{{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 883.066791] env[62096]: DEBUG oslo_concurrency.lockutils [req-489e0b79-0d22-4401-9d3b-27870eb9a078 req-8659e8ad-6461-4b19-990a-2a834eda76f4 service nova] Acquiring lock "refresh_cache-60bc5e14-f495-48da-8db0-54d75b523822" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.066791] env[62096]: DEBUG oslo_concurrency.lockutils [req-489e0b79-0d22-4401-9d3b-27870eb9a078 req-8659e8ad-6461-4b19-990a-2a834eda76f4 service nova] Acquired lock "refresh_cache-60bc5e14-f495-48da-8db0-54d75b523822" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.067091] env[62096]: DEBUG nova.network.neutron [req-489e0b79-0d22-4401-9d3b-27870eb9a078 req-8659e8ad-6461-4b19-990a-2a834eda76f4 service nova] [instance: 60bc5e14-f495-48da-8db0-54d75b523822] Refreshing network info cache for port 7e36f48d-97d8-4355-924b-977fd775b8b0 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 883.067600] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-489e0b79-0d22-4401-9d3b-27870eb9a078 req-8659e8ad-6461-4b19-990a-2a834eda76f4 service nova] Expecting reply to msg 763147eb371345d38449d32c6cdba47c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 883.073999] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 763147eb371345d38449d32c6cdba47c [ 883.308031] env[62096]: DEBUG nova.scheduler.client.report [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 883.310288] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Expecting reply to msg ab1b5a4d5b604a68ad0d0e2aedb31d21 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 883.323948] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ab1b5a4d5b604a68ad0d0e2aedb31d21 [ 883.502390] env[62096]: DEBUG oslo_vmware.api [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Task: {'id': task-397445, 'name': ReconfigVM_Task, 'duration_secs': 0.236459} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.503315] env[62096]: DEBUG nova.virt.vmwareapi.volumeops [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Reconfigured VM instance instance-00000048 to attach disk [datastore2] 5627f913-29d2-476e-8fde-8ea457cc56f2/5627f913-29d2-476e-8fde-8ea457cc56f2.vmdk or device None with type sparse {{(pid=62096) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 883.503930] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7f7d21cf-db8b-4575-801c-d87a381b7deb {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.510227] env[62096]: DEBUG oslo_vmware.api [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Waiting for the task: (returnval){ [ 883.510227] env[62096]: value = "task-397446" [ 883.510227] env[62096]: _type = "Task" [ 883.510227] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.517334] env[62096]: DEBUG oslo_vmware.api [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Task: {'id': task-397446, 'name': Rename_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.524611] env[62096]: DEBUG nova.network.neutron [-] [instance: 60bc5e14-f495-48da-8db0-54d75b523822] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.525032] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 3cb134834ebb49b58c3eb6c2f4cbf47e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 883.532908] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3cb134834ebb49b58c3eb6c2f4cbf47e [ 883.584849] env[62096]: DEBUG nova.network.neutron [req-489e0b79-0d22-4401-9d3b-27870eb9a078 req-8659e8ad-6461-4b19-990a-2a834eda76f4 service nova] [instance: 60bc5e14-f495-48da-8db0-54d75b523822] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 883.668549] env[62096]: DEBUG nova.network.neutron [req-489e0b79-0d22-4401-9d3b-27870eb9a078 req-8659e8ad-6461-4b19-990a-2a834eda76f4 service nova] [instance: 60bc5e14-f495-48da-8db0-54d75b523822] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.669046] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-489e0b79-0d22-4401-9d3b-27870eb9a078 req-8659e8ad-6461-4b19-990a-2a834eda76f4 service nova] Expecting reply to msg f011a5338dde45d8b654defe3246f72c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 883.677382] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f011a5338dde45d8b654defe3246f72c [ 883.812825] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.255s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 883.813336] env[62096]: DEBUG nova.compute.manager [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] Start building networks asynchronously for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 883.815179] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Expecting reply to msg 4fe120d55f994379a332ba2db2067d7b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 883.816206] env[62096]: DEBUG oslo_concurrency.lockutils [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 18.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.818339] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Expecting reply to msg 55fa53854cae497cb00df49fdf2f7e31 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 883.850502] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4fe120d55f994379a332ba2db2067d7b [ 883.851149] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 55fa53854cae497cb00df49fdf2f7e31 [ 884.020202] env[62096]: DEBUG oslo_vmware.api [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Task: {'id': task-397446, 'name': Rename_Task, 'duration_secs': 0.130968} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.020686] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Powering on the VM {{(pid=62096) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 884.021051] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ceeed544-2c46-48da-b4eb-690785707054 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.027148] env[62096]: INFO nova.compute.manager [-] [instance: 60bc5e14-f495-48da-8db0-54d75b523822] Took 1.02 seconds to deallocate network for instance. [ 884.027589] env[62096]: DEBUG oslo_vmware.api [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Waiting for the task: (returnval){ [ 884.027589] env[62096]: value = "task-397447" [ 884.027589] env[62096]: _type = "Task" [ 884.027589] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.032463] env[62096]: DEBUG nova.compute.claims [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 60bc5e14-f495-48da-8db0-54d75b523822] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 884.032775] env[62096]: DEBUG oslo_concurrency.lockutils [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.037233] env[62096]: DEBUG oslo_vmware.api [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Task: {'id': task-397447, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.171607] env[62096]: DEBUG oslo_concurrency.lockutils [req-489e0b79-0d22-4401-9d3b-27870eb9a078 req-8659e8ad-6461-4b19-990a-2a834eda76f4 service nova] Releasing lock "refresh_cache-60bc5e14-f495-48da-8db0-54d75b523822" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.171887] env[62096]: DEBUG nova.compute.manager [req-489e0b79-0d22-4401-9d3b-27870eb9a078 req-8659e8ad-6461-4b19-990a-2a834eda76f4 service nova] [instance: 60bc5e14-f495-48da-8db0-54d75b523822] Received event network-vif-deleted-7e36f48d-97d8-4355-924b-977fd775b8b0 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 884.321000] env[62096]: DEBUG nova.compute.utils [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 884.321649] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Expecting reply to msg de48ffad29d644a183a88bed1d2bec6e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 884.326418] env[62096]: DEBUG nova.compute.manager [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 884.326418] env[62096]: DEBUG nova.network.neutron [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 884.337002] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg de48ffad29d644a183a88bed1d2bec6e [ 884.379692] env[62096]: DEBUG nova.policy [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'baa97847d396418f89287f224b73c464', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '98ac4c25deee45e8a655575d81f18bca', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 884.530572] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf88a0ca-137b-416d-b11e-6bb7767a70e6 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.541565] env[62096]: DEBUG oslo_vmware.api [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Task: {'id': task-397447, 'name': 
PowerOnVM_Task, 'duration_secs': 0.460685} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.543739] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Powered on the VM {{(pid=62096) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 884.543965] env[62096]: DEBUG nova.compute.manager [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Checking state {{(pid=62096) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 884.544835] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-849e024a-5920-4aa1-a0e2-ddd68359751e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.548316] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14091999-947a-4a03-a833-fa2b010a3029 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.556830] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Expecting reply to msg b4dbb1b1746549b587f217394a99e3b6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 884.582789] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1a1a039-6a7e-4f91-8550-692d7c16f58e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.588222] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4dbb1b1746549b587f217394a99e3b6 [ 884.591984] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e89374a-235d-46f8-9963-c672a9d8c423 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.608774] env[62096]: DEBUG nova.compute.provider_tree [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 884.609289] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Expecting reply to msg 7acce1a6e22341afad09f5f1a666fc5c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 884.616478] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7acce1a6e22341afad09f5f1a666fc5c [ 884.720134] env[62096]: DEBUG nova.network.neutron [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] Successfully created port: fb223d70-ad13-4c1b-8030-cc860bc8f729 {{(pid=62096) _create_port_minimal 
/opt/stack/nova/nova/network/neutron.py:548}} [ 884.826812] env[62096]: DEBUG nova.compute.manager [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] Start building block device mappings for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 884.828794] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Expecting reply to msg 8b705442fd5d4e57918dbec0d1680a0f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 884.878684] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8b705442fd5d4e57918dbec0d1680a0f [ 885.088805] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 885.112140] env[62096]: DEBUG nova.scheduler.client.report [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 885.114646] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Expecting reply to msg 53eeb1fa71be4e8697678e0f1dd120ce in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 885.126646] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 53eeb1fa71be4e8697678e0f1dd120ce [ 885.334569] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Expecting reply to msg cd8c4e9c485c4c0b97d112b7cbe2c1df in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 885.378905] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cd8c4e9c485c4c0b97d112b7cbe2c1df [ 885.426537] env[62096]: DEBUG nova.compute.manager [req-36f8647f-2d6c-4c72-9a1f-7986ac745e2c req-eea77297-6f02-45bb-b464-d3b99cf19c1a service nova] [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] Received event network-changed-fb223d70-ad13-4c1b-8030-cc860bc8f729 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 885.426726] env[62096]: DEBUG nova.compute.manager [req-36f8647f-2d6c-4c72-9a1f-7986ac745e2c req-eea77297-6f02-45bb-b464-d3b99cf19c1a service nova] [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] Refreshing instance network info cache due to event 
network-changed-fb223d70-ad13-4c1b-8030-cc860bc8f729. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 885.426941] env[62096]: DEBUG oslo_concurrency.lockutils [req-36f8647f-2d6c-4c72-9a1f-7986ac745e2c req-eea77297-6f02-45bb-b464-d3b99cf19c1a service nova] Acquiring lock "refresh_cache-f832a621-c221-4ae8-928e-d6f9fa6b0586" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 885.427283] env[62096]: DEBUG oslo_concurrency.lockutils [req-36f8647f-2d6c-4c72-9a1f-7986ac745e2c req-eea77297-6f02-45bb-b464-d3b99cf19c1a service nova] Acquired lock "refresh_cache-f832a621-c221-4ae8-928e-d6f9fa6b0586" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.427362] env[62096]: DEBUG nova.network.neutron [req-36f8647f-2d6c-4c72-9a1f-7986ac745e2c req-eea77297-6f02-45bb-b464-d3b99cf19c1a service nova] [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] Refreshing network info cache for port fb223d70-ad13-4c1b-8030-cc860bc8f729 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 885.427761] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-36f8647f-2d6c-4c72-9a1f-7986ac745e2c req-eea77297-6f02-45bb-b464-d3b99cf19c1a service nova] Expecting reply to msg e1280033ae874a62bb4e5c773b7eee1f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 885.434474] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e1280033ae874a62bb4e5c773b7eee1f [ 885.618199] env[62096]: DEBUG oslo_concurrency.lockutils [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.802s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.618844] env[62096]: ERROR nova.compute.manager [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b1b64d64-3759-4701-ae6a-64d26a18db32, please check neutron logs for more information. 
[ 885.618844] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] Traceback (most recent call last): [ 885.618844] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 885.618844] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] self.driver.spawn(context, instance, image_meta, [ 885.618844] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 885.618844] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] self._vmops.spawn(context, instance, image_meta, injected_files, [ 885.618844] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 885.618844] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] vm_ref = self.build_virtual_machine(instance, [ 885.618844] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 885.618844] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] vif_infos = vmwarevif.get_vif_info(self._session, [ 885.618844] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 885.619202] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] for vif in network_info: [ 885.619202] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 885.619202] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] return self._sync_wrapper(fn, *args, **kwargs) [ 885.619202] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 885.619202] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] self.wait() [ 885.619202] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 885.619202] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] self[:] = self._gt.wait() [ 885.619202] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 885.619202] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] return self._exit_event.wait() [ 885.619202] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 885.619202] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] result = hub.switch() [ 885.619202] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
885.619202] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] return self.greenlet.switch() [ 885.619555] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 885.619555] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] result = function(*args, **kwargs) [ 885.619555] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 885.619555] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] return func(*args, **kwargs) [ 885.619555] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 885.619555] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] raise e [ 885.619555] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 885.619555] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] nwinfo = self.network_api.allocate_for_instance( [ 885.619555] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 885.619555] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] created_port_ids = self._update_ports_for_instance( [ 885.619555] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 885.619555] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] with excutils.save_and_reraise_exception(): [ 885.619555] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 885.619978] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] self.force_reraise() [ 885.619978] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 885.619978] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] raise self.value [ 885.619978] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 885.619978] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] updated_port = self._update_port( [ 885.619978] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 885.619978] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] _ensure_no_port_binding_failure(port) [ 885.619978] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 885.619978] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] raise exception.PortBindingFailed(port_id=port['id']) [ 885.619978] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] nova.exception.PortBindingFailed: Binding failed for port b1b64d64-3759-4701-ae6a-64d26a18db32, please check neutron logs for more information. [ 885.619978] env[62096]: ERROR nova.compute.manager [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] [ 885.620333] env[62096]: DEBUG nova.compute.utils [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] Binding failed for port b1b64d64-3759-4701-ae6a-64d26a18db32, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 885.620852] env[62096]: DEBUG oslo_concurrency.lockutils [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.987s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 885.628084] env[62096]: INFO nova.compute.claims [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 885.628084] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Expecting reply to msg 4101f1972c19499d81f4a60991890bcf in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 885.628084] env[62096]: DEBUG nova.compute.manager [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] Build of instance 5c04dded-d1c9-44fe-bf2f-de295d21b725 was re-scheduled: Binding failed for port b1b64d64-3759-4701-ae6a-64d26a18db32, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 885.628084] env[62096]: DEBUG nova.compute.manager [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 885.628084] env[62096]: DEBUG oslo_concurrency.lockutils [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Acquiring lock "refresh_cache-5c04dded-d1c9-44fe-bf2f-de295d21b725" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 885.629348] env[62096]: DEBUG oslo_concurrency.lockutils [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Acquired lock "refresh_cache-5c04dded-d1c9-44fe-bf2f-de295d21b725" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.629348] env[62096]: DEBUG nova.network.neutron [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 885.629348] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Expecting reply to msg 884e8d8d8f1b4e059b3c7c65dde8d193 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 885.637824] env[62096]: ERROR nova.compute.manager [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port fb223d70-ad13-4c1b-8030-cc860bc8f729, please check neutron logs for more information. 
[ 885.637824] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 885.637824] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 885.637824] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 885.637824] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 885.637824] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 885.637824] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 885.637824] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 885.637824] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 885.637824] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 885.637824] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 885.637824] env[62096]: ERROR nova.compute.manager raise self.value [ 885.637824] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 885.637824] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 885.637824] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 885.637824] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 885.638312] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 885.638312] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 885.638312] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port fb223d70-ad13-4c1b-8030-cc860bc8f729, please check neutron logs for more information. 
[ 885.638312] env[62096]: ERROR nova.compute.manager [ 885.638312] env[62096]: Traceback (most recent call last): [ 885.638312] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 885.638312] env[62096]: listener.cb(fileno) [ 885.638312] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 885.638312] env[62096]: result = function(*args, **kwargs) [ 885.638312] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 885.638312] env[62096]: return func(*args, **kwargs) [ 885.638312] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 885.638312] env[62096]: raise e [ 885.638312] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 885.638312] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 885.638312] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 885.638312] env[62096]: created_port_ids = self._update_ports_for_instance( [ 885.638312] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 885.638312] env[62096]: with excutils.save_and_reraise_exception(): [ 885.638312] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 885.638312] env[62096]: self.force_reraise() [ 885.638312] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 885.638312] env[62096]: raise self.value [ 885.638312] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 885.638312] env[62096]: updated_port = self._update_port( [ 885.638312] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 885.638312] env[62096]: _ensure_no_port_binding_failure(port) [ 885.638312] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 885.638312] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 885.639057] env[62096]: nova.exception.PortBindingFailed: Binding failed for port fb223d70-ad13-4c1b-8030-cc860bc8f729, please check neutron logs for more information. [ 885.639057] env[62096]: Removing descriptor: 14 [ 885.670076] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 884e8d8d8f1b4e059b3c7c65dde8d193 [ 885.670749] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4101f1972c19499d81f4a60991890bcf [ 885.689970] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Expecting reply to msg 5ced40e151bc43b29fd5cccf4017b372 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 885.702855] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ced40e151bc43b29fd5cccf4017b372 [ 885.839457] env[62096]: DEBUG nova.compute.manager [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 885.859914] env[62096]: DEBUG nova.virt.hardware [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 885.860423] env[62096]: DEBUG nova.virt.hardware [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 885.860694] env[62096]: DEBUG nova.virt.hardware [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 885.861003] env[62096]: DEBUG nova.virt.hardware [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 885.861348] env[62096]: DEBUG nova.virt.hardware [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 885.861628] env[62096]: DEBUG nova.virt.hardware [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 885.861949] env[62096]: DEBUG nova.virt.hardware [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 885.862227] env[62096]: DEBUG nova.virt.hardware [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
885.862505] env[62096]: DEBUG nova.virt.hardware [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 885.862785] env[62096]: DEBUG nova.virt.hardware [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 885.863067] env[62096]: DEBUG nova.virt.hardware [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 885.864346] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b8959d6-c502-4c9c-a826-a31067f2133a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.873389] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d84d2cfe-450c-4e7b-97fe-db0eea31ae4d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.888588] env[62096]: ERROR nova.compute.manager [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port fb223d70-ad13-4c1b-8030-cc860bc8f729, please check neutron logs for more information. 
[ 885.888588] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] Traceback (most recent call last): [ 885.888588] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 885.888588] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] yield resources [ 885.888588] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 885.888588] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] self.driver.spawn(context, instance, image_meta, [ 885.888588] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 885.888588] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] self._vmops.spawn(context, instance, image_meta, injected_files, [ 885.888588] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 885.888588] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] vm_ref = self.build_virtual_machine(instance, [ 885.888588] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 885.889007] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] vif_infos = vmwarevif.get_vif_info(self._session, [ 885.889007] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 885.889007] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] for vif in network_info: [ 885.889007] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 885.889007] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] return self._sync_wrapper(fn, *args, **kwargs) [ 885.889007] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 885.889007] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] self.wait() [ 885.889007] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 885.889007] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] self[:] = self._gt.wait() [ 885.889007] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 885.889007] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] return self._exit_event.wait() [ 885.889007] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 885.889007] env[62096]: ERROR 
nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] current.throw(*self._exc) [ 885.889447] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 885.889447] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] result = function(*args, **kwargs) [ 885.889447] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 885.889447] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] return func(*args, **kwargs) [ 885.889447] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 885.889447] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] raise e [ 885.889447] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 885.889447] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] nwinfo = self.network_api.allocate_for_instance( [ 885.889447] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 885.889447] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] created_port_ids = self._update_ports_for_instance( [ 885.889447] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 885.889447] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] with excutils.save_and_reraise_exception(): [ 885.889447] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 885.889850] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] self.force_reraise() [ 885.889850] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 885.889850] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] raise self.value [ 885.889850] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 885.889850] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] updated_port = self._update_port( [ 885.889850] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 885.889850] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] _ensure_no_port_binding_failure(port) [ 885.889850] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
885.889850] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] raise exception.PortBindingFailed(port_id=port['id']) [ 885.889850] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] nova.exception.PortBindingFailed: Binding failed for port fb223d70-ad13-4c1b-8030-cc860bc8f729, please check neutron logs for more information. [ 885.889850] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] [ 885.890671] env[62096]: INFO nova.compute.manager [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] Terminating instance [ 885.893102] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Acquiring lock "refresh_cache-f832a621-c221-4ae8-928e-d6f9fa6b0586" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 885.945307] env[62096]: DEBUG nova.network.neutron [req-36f8647f-2d6c-4c72-9a1f-7986ac745e2c req-eea77297-6f02-45bb-b464-d3b99cf19c1a service nova] [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 886.022420] env[62096]: DEBUG nova.network.neutron [req-36f8647f-2d6c-4c72-9a1f-7986ac745e2c req-eea77297-6f02-45bb-b464-d3b99cf19c1a service nova] [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 886.023442] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-36f8647f-2d6c-4c72-9a1f-7986ac745e2c req-eea77297-6f02-45bb-b464-d3b99cf19c1a service nova] Expecting reply to msg addbd9e4037b463a8d3641a96bd99264 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 886.031765] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg addbd9e4037b463a8d3641a96bd99264 [ 886.130406] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Expecting reply to msg d7e4802168824e19a81d059aa04e5a3c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 886.142565] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d7e4802168824e19a81d059aa04e5a3c [ 886.147551] env[62096]: DEBUG nova.network.neutron [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 886.191938] env[62096]: INFO nova.compute.manager [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Rebuilding instance [ 886.249692] env[62096]: DEBUG nova.compute.manager [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Checking state {{(pid=62096) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 886.250562] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97ab9519-37ad-418f-9204-42fafc7963c3 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.254239] env[62096]: DEBUG nova.network.neutron [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 886.254689] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Expecting reply to msg cf38cf196a1d495dabf73f6b5f7fc015 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 886.258406] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Expecting reply to msg dd04df4ecc294221bcbf6ab381b7f9fe in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 886.262732] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cf38cf196a1d495dabf73f6b5f7fc015 [ 886.319449] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dd04df4ecc294221bcbf6ab381b7f9fe [ 886.525788] env[62096]: DEBUG oslo_concurrency.lockutils [req-36f8647f-2d6c-4c72-9a1f-7986ac745e2c req-eea77297-6f02-45bb-b464-d3b99cf19c1a service nova] Releasing lock "refresh_cache-f832a621-c221-4ae8-928e-d6f9fa6b0586" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 886.526172] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Acquired lock "refresh_cache-f832a621-c221-4ae8-928e-d6f9fa6b0586" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.526365] env[62096]: DEBUG nova.network.neutron [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 886.526799] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Expecting reply to msg 
d14b7c1e34574387bbf25ff688060935 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 886.533456] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d14b7c1e34574387bbf25ff688060935 [ 886.759941] env[62096]: DEBUG oslo_concurrency.lockutils [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Releasing lock "refresh_cache-5c04dded-d1c9-44fe-bf2f-de295d21b725" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 886.760204] env[62096]: DEBUG nova.compute.manager [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 886.760387] env[62096]: DEBUG nova.compute.manager [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 886.760556] env[62096]: DEBUG nova.network.neutron [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 886.763416] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Powering off the VM {{(pid=62096) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 886.763684] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ecefdcf0-488c-42c9-a3c5-ef259851f53d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.775353] env[62096]: DEBUG oslo_vmware.api [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Waiting for the task: (returnval){ [ 886.775353] env[62096]: value = "task-397448" [ 886.775353] env[62096]: _type = "Task" [ 886.775353] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.787938] env[62096]: DEBUG oslo_vmware.api [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Task: {'id': task-397448, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.788771] env[62096]: DEBUG nova.network.neutron [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 886.789432] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Expecting reply to msg bed781bc7d704ebda305b0fab275a75e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 886.800830] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bed781bc7d704ebda305b0fab275a75e [ 886.856669] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09cf019e-f2fc-40fe-bbe4-0ed17713efb0 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.864589] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90e65533-c047-49f6-94e7-83e22a572c5e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.895513] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d59d37c4-61d3-448a-9632-29b1d31dd52a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.903254] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70444483-92d2-43a3-a7ce-e309bf63dba9 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.916494] env[62096]: DEBUG nova.compute.provider_tree [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 886.917051] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Expecting reply to msg a8faee410a814538ae6192878709ac00 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 886.924304] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a8faee410a814538ae6192878709ac00 [ 887.042919] env[62096]: DEBUG nova.network.neutron [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 887.112048] env[62096]: DEBUG nova.network.neutron [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.112578] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Expecting reply to msg 5287aab044b64615be499d9bf31ad2a7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 887.120467] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5287aab044b64615be499d9bf31ad2a7 [ 887.290863] env[62096]: DEBUG oslo_vmware.api [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Task: {'id': task-397448, 'name': PowerOffVM_Task, 'duration_secs': 0.14248} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.290863] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Powered off the VM {{(pid=62096) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 887.290863] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 887.290863] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b267c7f0-76d5-466c-88cb-f8c99e4176ef {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.292893] env[62096]: DEBUG nova.network.neutron [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.293329] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Expecting reply to msg d4c29120657940f69b7ad9e88f434834 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 887.294140] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Unregistering the VM {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 887.294360] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c601e196-baad-48d2-a3d9-9825d77d8521 {{(pid=62096) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.301319] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d4c29120657940f69b7ad9e88f434834 [ 887.318240] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Unregistered the VM {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 887.318450] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Deleting contents of the VM from datastore datastore2 {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 887.318633] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Deleting the datastore file [datastore2] 5627f913-29d2-476e-8fde-8ea457cc56f2 {{(pid=62096) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 887.318865] env[62096]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-01fc892a-60bf-4146-b105-e71bbcfbb7ff {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.324427] env[62096]: DEBUG oslo_vmware.api [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Waiting for the task: (returnval){ [ 887.324427] env[62096]: value = "task-397450" [ 887.324427] env[62096]: _type = "Task" [ 887.324427] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.331493] env[62096]: DEBUG oslo_vmware.api [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Task: {'id': task-397450, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.419547] env[62096]: DEBUG nova.scheduler.client.report [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 887.421962] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Expecting reply to msg 3a8ab66c4f4a40528b3ac0413ff1da8d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 887.433413] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3a8ab66c4f4a40528b3ac0413ff1da8d [ 887.451210] env[62096]: DEBUG nova.compute.manager [req-2db8b446-5933-4929-91df-ef3c420a3871 req-80de7580-4ea1-4e1d-80ed-fb0cfa47d0bf service nova] [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] Received event network-vif-deleted-fb223d70-ad13-4c1b-8030-cc860bc8f729 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 887.615380] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Releasing lock "refresh_cache-f832a621-c221-4ae8-928e-d6f9fa6b0586" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 887.615741] env[62096]: DEBUG nova.compute.manager [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 887.615936] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 887.616258] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e64a7ebc-6cb3-4b35-a9b9-494b7354d7bb {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.625578] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c4ff02a-1d08-4426-b499-f7abd5bc46af {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.646142] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f832a621-c221-4ae8-928e-d6f9fa6b0586 could not be found. [ 887.646381] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 887.646572] env[62096]: INFO nova.compute.manager [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] Took 0.03 seconds to destroy the instance on the hypervisor. [ 887.646812] env[62096]: DEBUG oslo.service.loopingcall [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 887.647031] env[62096]: DEBUG nova.compute.manager [-] [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 887.647122] env[62096]: DEBUG nova.network.neutron [-] [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 887.661194] env[62096]: DEBUG nova.network.neutron [-] [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 887.661657] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 79d000354805409a872f94de8da7a4ff in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 887.668319] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 79d000354805409a872f94de8da7a4ff [ 887.796630] env[62096]: INFO nova.compute.manager [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] [instance: 5c04dded-d1c9-44fe-bf2f-de295d21b725] Took 1.04 seconds to deallocate network for instance. [ 887.798520] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Expecting reply to msg 8d7309da90e4437eb850e099e7cf5ef4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 887.835256] env[62096]: DEBUG oslo_vmware.api [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Task: {'id': task-397450, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132162} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.835611] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Deleted the datastore file {{(pid=62096) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 887.835611] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Deleted contents of the VM from datastore datastore2 {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 887.835796] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 887.837835] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Expecting reply to msg 6b747df26dd14965b01da96e0bd05bb6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 887.844738] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8d7309da90e4437eb850e099e7cf5ef4 [ 887.886692] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6b747df26dd14965b01da96e0bd05bb6 [ 887.924584] env[62096]: DEBUG oslo_concurrency.lockutils [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.304s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 887.925195] env[62096]: DEBUG nova.compute.manager [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 
tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] Start building networks asynchronously for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 887.927127] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Expecting reply to msg b9b94d7781004d59974da80b9d207dfc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 887.928147] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.507s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 887.937633] env[62096]: INFO nova.compute.claims [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 887.937633] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Expecting reply to msg 6abf90f9daab49619e6097da3d64eb16 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 887.963148] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b9b94d7781004d59974da80b9d207dfc [ 887.963722] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6abf90f9daab49619e6097da3d64eb16 [ 888.163741] env[62096]: DEBUG nova.network.neutron [-] [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 888.164275] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg dacfed16df7243838f21dba26f4b1b11 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 888.174983] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dacfed16df7243838f21dba26f4b1b11 [ 888.303606] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Expecting reply to msg 94beb828ca764fd29e31a3bef6432424 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 888.340452] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 94beb828ca764fd29e31a3bef6432424 [ 888.350759] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Expecting reply to msg a01b33ab75034444b5fc335315357f1f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 888.407282] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a01b33ab75034444b5fc335315357f1f [ 888.434200] env[62096]: DEBUG nova.compute.utils [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Using /dev/sd instead of None {{(pid=62096) 
get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 888.434822] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Expecting reply to msg 275feb2dc7b6412d9834fa102e2c8511 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 888.436062] env[62096]: DEBUG nova.compute.manager [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 888.436240] env[62096]: DEBUG nova.network.neutron [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 888.439364] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Expecting reply to msg 3e6511079eca427c9ae83ee0c7aed64c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 888.444414] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 275feb2dc7b6412d9834fa102e2c8511 [ 888.447200] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e6511079eca427c9ae83ee0c7aed64c [ 888.489518] env[62096]: DEBUG nova.policy [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '224d54f476614dfb873e768d577afad6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '73069c3d34654084b7122d5d642d38a5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 888.666543] env[62096]: INFO nova.compute.manager [-] [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] Took 1.02 seconds to deallocate network for instance. 
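The task-397450 entries above ("Waiting for the task: (returnval){ value = \"task-397450\" ... } to complete", "progress is 0%", then "DeleteDatastoreFile_Task ... completed successfully" with duration_secs 0.132162) show the oslo_vmware wait_for_task pattern: the VMware driver submits a vCenter task such as DeleteDatastoreFile_Task or CreateVM_Task and then polls its state until it reaches a terminal state. The sketch below is a minimal, self-contained illustration of that polling loop in plain Python; the get_task_state callable, its return values, and the poll interval are hypothetical stand-ins for this example, not the oslo.vmware implementation.

import time

def wait_for_task(get_task_state, poll_interval=0.5, timeout=300.0):
    # Poll a hypothetical get_task_state() callable until the task reaches a
    # terminal state, mirroring the "progress is 0%" -> "completed successfully"
    # sequence in the surrounding log entries.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress, error = get_task_state()
        if state == "success":
            return True                        # e.g. DeleteDatastoreFile_Task finished
        if state == "error":
            raise RuntimeError(f"task failed: {error}")
        print(f"task progress is {progress}%")  # still queued or running; retry
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete before the deadline")

The "Waiting for function ... to return" entries nearby come from oslo.service's looping-call helper, which drives this kind of periodic polling inside the driver; the standalone loop above just makes the polling behaviour explicit.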
[ 888.668927] env[62096]: DEBUG nova.compute.claims [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 888.669113] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 888.771267] env[62096]: DEBUG nova.network.neutron [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] Successfully created port: e927db6d-5558-4472-a05c-256404ce7a0e {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 888.824947] env[62096]: INFO nova.scheduler.client.report [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Deleted allocations for instance 5c04dded-d1c9-44fe-bf2f-de295d21b725 [ 888.832492] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Expecting reply to msg 4842d38658824145bf9f09ef512dc2be in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 888.846303] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4842d38658824145bf9f09ef512dc2be [ 888.875419] env[62096]: DEBUG nova.virt.hardware [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 888.875690] env[62096]: DEBUG nova.virt.hardware [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 888.875866] env[62096]: DEBUG nova.virt.hardware [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 888.876061] env[62096]: DEBUG 
nova.virt.hardware [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 888.876212] env[62096]: DEBUG nova.virt.hardware [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 888.876609] env[62096]: DEBUG nova.virt.hardware [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 888.876850] env[62096]: DEBUG nova.virt.hardware [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 888.877059] env[62096]: DEBUG nova.virt.hardware [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 888.877295] env[62096]: DEBUG nova.virt.hardware [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 888.877674] env[62096]: DEBUG nova.virt.hardware [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 888.877881] env[62096]: DEBUG nova.virt.hardware [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 888.878777] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bf13cb9-a365-4dbc-af33-75640fc735e4 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.889270] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abd9a8d2-cf0a-46e7-8199-676fafc379e0 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.906416] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Instance VIF info [] {{(pid=62096) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 888.913252] 
env[62096]: DEBUG oslo.service.loopingcall [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 888.913504] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Creating VM on the ESX host {{(pid=62096) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 888.913718] env[62096]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-64b6a80a-8d69-45ef-bc8a-1650e4cd1c3c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.934692] env[62096]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 888.934692] env[62096]: value = "task-397451" [ 888.934692] env[62096]: _type = "Task" [ 888.934692] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.942905] env[62096]: DEBUG nova.compute.manager [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] Start building block device mappings for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 888.944725] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Expecting reply to msg dc598c5680404d949dcc9bbb15c4d3cf in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 888.948047] env[62096]: DEBUG oslo_vmware.api [-] Task: {'id': task-397451, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.986116] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dc598c5680404d949dcc9bbb15c4d3cf [ 889.149758] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6c4001f-99aa-4aeb-858d-9b6d4188c77c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.157282] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61260bfb-8a71-487d-bf36-b2fa6435afc5 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.190817] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2af166a0-6cac-480d-b0af-68b2b54b9856 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.198819] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc8036c4-ebee-4619-920c-48ccc7b861ac {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.212864] env[62096]: DEBUG nova.compute.provider_tree [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 889.213445] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Expecting reply to msg ffcbb77388884aa0871700eec9cb252b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 889.220529] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ffcbb77388884aa0871700eec9cb252b [ 889.334686] env[62096]: DEBUG oslo_concurrency.lockutils [None req-9602e4c9-801d-4ec4-85d8-84d6ec45cb64 tempest-ServersTestJSON-242390470 tempest-ServersTestJSON-242390470-project-member] Lock "5c04dded-d1c9-44fe-bf2f-de295d21b725" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 117.532s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 889.335406] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg c0543bf3e38d43feb501a1b2bb44f7ab in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 889.348967] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c0543bf3e38d43feb501a1b2bb44f7ab [ 889.444083] env[62096]: DEBUG oslo_vmware.api [-] Task: {'id': task-397451, 'name': CreateVM_Task, 'duration_secs': 0.321141} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.444278] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Created VM on the ESX host {{(pid=62096) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 889.444700] env[62096]: DEBUG oslo_concurrency.lockutils [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 889.444859] env[62096]: DEBUG oslo_concurrency.lockutils [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.445213] env[62096]: DEBUG oslo_concurrency.lockutils [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Acquired external semaphore "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 889.445465] env[62096]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72699e7b-e999-46f1-94bb-1a6c90524ac1 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.451493] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Expecting reply to msg c5256626d90a4795980d0309829a255d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 889.456040] env[62096]: DEBUG oslo_vmware.api [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Waiting for the task: (returnval){ [ 889.456040] env[62096]: value = "session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]5266c857-e79d-566e-e8ea-2e7c440771e3" [ 889.456040] env[62096]: _type = "Task" [ 889.456040] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.462000] env[62096]: DEBUG oslo_vmware.api [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]5266c857-e79d-566e-e8ea-2e7c440771e3, 'name': SearchDatastore_Task, 'duration_secs': 0.009625} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.462267] env[62096]: DEBUG oslo_concurrency.lockutils [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 889.462484] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Processing image fb764baa-3805-45c4-a694-aa91b0932110 {{(pid=62096) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 889.462708] env[62096]: DEBUG oslo_concurrency.lockutils [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 889.462847] env[62096]: DEBUG oslo_concurrency.lockutils [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.463015] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62096) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 889.463271] env[62096]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2f2ad3e6-0ea6-4bda-bc7b-d5725847b9cc {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.470813] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62096) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 889.470990] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62096) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 889.471742] env[62096]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ff17c99-7b8a-43d1-805a-dc312762905f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.476804] env[62096]: DEBUG oslo_vmware.api [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Waiting for the task: (returnval){ [ 889.476804] env[62096]: value = "session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]52181a78-45e9-009c-40c6-568aec475faa" [ 889.476804] env[62096]: _type = "Task" [ 889.476804] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.486065] env[62096]: DEBUG oslo_vmware.api [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]52181a78-45e9-009c-40c6-568aec475faa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.493458] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c5256626d90a4795980d0309829a255d [ 889.638336] env[62096]: DEBUG nova.compute.manager [req-24d003b6-df5f-45cb-8bb0-26dca364569d req-c45b268b-b63b-40c8-a0ba-fa5b85942379 service nova] [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] Received event network-changed-e927db6d-5558-4472-a05c-256404ce7a0e {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 889.638551] env[62096]: DEBUG nova.compute.manager [req-24d003b6-df5f-45cb-8bb0-26dca364569d req-c45b268b-b63b-40c8-a0ba-fa5b85942379 service nova] [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] Refreshing instance network info cache due to event network-changed-e927db6d-5558-4472-a05c-256404ce7a0e. 
{{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 889.638767] env[62096]: DEBUG oslo_concurrency.lockutils [req-24d003b6-df5f-45cb-8bb0-26dca364569d req-c45b268b-b63b-40c8-a0ba-fa5b85942379 service nova] Acquiring lock "refresh_cache-ce80b353-2f7c-4165-b4fa-b81a1e539295" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 889.638940] env[62096]: DEBUG oslo_concurrency.lockutils [req-24d003b6-df5f-45cb-8bb0-26dca364569d req-c45b268b-b63b-40c8-a0ba-fa5b85942379 service nova] Acquired lock "refresh_cache-ce80b353-2f7c-4165-b4fa-b81a1e539295" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.639109] env[62096]: DEBUG nova.network.neutron [req-24d003b6-df5f-45cb-8bb0-26dca364569d req-c45b268b-b63b-40c8-a0ba-fa5b85942379 service nova] [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] Refreshing network info cache for port e927db6d-5558-4472-a05c-256404ce7a0e {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 889.639521] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-24d003b6-df5f-45cb-8bb0-26dca364569d req-c45b268b-b63b-40c8-a0ba-fa5b85942379 service nova] Expecting reply to msg b6a424f9106d48cb90254c962f53174b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 889.646004] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b6a424f9106d48cb90254c962f53174b [ 889.716600] env[62096]: DEBUG nova.scheduler.client.report [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 889.718999] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Expecting reply to msg 4113ef6962c94e72a1a1856ee41dcb77 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 889.732126] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4113ef6962c94e72a1a1856ee41dcb77 [ 889.787237] env[62096]: ERROR nova.compute.manager [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e927db6d-5558-4472-a05c-256404ce7a0e, please check neutron logs for more information. 
[ 889.787237] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 889.787237] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 889.787237] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 889.787237] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 889.787237] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 889.787237] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 889.787237] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 889.787237] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 889.787237] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 889.787237] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 889.787237] env[62096]: ERROR nova.compute.manager raise self.value [ 889.787237] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 889.787237] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 889.787237] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 889.787237] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 889.787727] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 889.787727] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 889.787727] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e927db6d-5558-4472-a05c-256404ce7a0e, please check neutron logs for more information. 
[ 889.787727] env[62096]: ERROR nova.compute.manager [ 889.787727] env[62096]: Traceback (most recent call last): [ 889.787727] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 889.787727] env[62096]: listener.cb(fileno) [ 889.787727] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 889.787727] env[62096]: result = function(*args, **kwargs) [ 889.787727] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 889.787727] env[62096]: return func(*args, **kwargs) [ 889.787727] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 889.787727] env[62096]: raise e [ 889.787727] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 889.787727] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 889.787727] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 889.787727] env[62096]: created_port_ids = self._update_ports_for_instance( [ 889.787727] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 889.787727] env[62096]: with excutils.save_and_reraise_exception(): [ 889.787727] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 889.787727] env[62096]: self.force_reraise() [ 889.787727] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 889.787727] env[62096]: raise self.value [ 889.787727] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 889.787727] env[62096]: updated_port = self._update_port( [ 889.787727] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 889.787727] env[62096]: _ensure_no_port_binding_failure(port) [ 889.787727] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 889.787727] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 889.788561] env[62096]: nova.exception.PortBindingFailed: Binding failed for port e927db6d-5558-4472-a05c-256404ce7a0e, please check neutron logs for more information. [ 889.788561] env[62096]: Removing descriptor: 14 [ 889.837677] env[62096]: DEBUG nova.compute.manager [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 8802af61-3692-4e27-85f4-945afccff231] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 889.839655] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg e2a793ac41dd411a932aabae491ca609 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 889.871631] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e2a793ac41dd411a932aabae491ca609 [ 889.956307] env[62096]: DEBUG nova.compute.manager [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 889.982644] env[62096]: DEBUG nova.virt.hardware [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 889.982896] env[62096]: DEBUG nova.virt.hardware [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 889.983075] env[62096]: DEBUG nova.virt.hardware [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 889.983255] env[62096]: DEBUG nova.virt.hardware [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 889.983404] env[62096]: DEBUG nova.virt.hardware [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 889.983568] env[62096]: DEBUG nova.virt.hardware [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 889.983854] env[62096]: DEBUG nova.virt.hardware [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 889.984082] env[62096]: DEBUG nova.virt.hardware [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 889.984275] env[62096]: DEBUG nova.virt.hardware [None 
req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 889.984669] env[62096]: DEBUG nova.virt.hardware [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 889.984669] env[62096]: DEBUG nova.virt.hardware [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 889.985609] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b4cc4e0-e44a-49d4-9cc1-7da1da8639a2 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.994024] env[62096]: DEBUG oslo_vmware.api [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]52181a78-45e9-009c-40c6-568aec475faa, 'name': SearchDatastore_Task, 'duration_secs': 0.008076} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.996636] env[62096]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3fab6b3-ff0b-4b10-9a75-8572507b0107 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.002192] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d972d34-433b-4b12-9531-3d6ba9ed676d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.009845] env[62096]: DEBUG oslo_vmware.api [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Waiting for the task: (returnval){ [ 890.009845] env[62096]: value = "session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]523dcc0a-f002-40f1-5fb5-a2ab457ab852" [ 890.009845] env[62096]: _type = "Task" [ 890.009845] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.020129] env[62096]: ERROR nova.compute.manager [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e927db6d-5558-4472-a05c-256404ce7a0e, please check neutron logs for more information. 
[ 890.020129] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] Traceback (most recent call last): [ 890.020129] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 890.020129] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] yield resources [ 890.020129] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 890.020129] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] self.driver.spawn(context, instance, image_meta, [ 890.020129] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 890.020129] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] self._vmops.spawn(context, instance, image_meta, injected_files, [ 890.020129] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 890.020129] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] vm_ref = self.build_virtual_machine(instance, [ 890.020129] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 890.020488] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] vif_infos = vmwarevif.get_vif_info(self._session, [ 890.020488] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 890.020488] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] for vif in network_info: [ 890.020488] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 890.020488] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] return self._sync_wrapper(fn, *args, **kwargs) [ 890.020488] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 890.020488] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] self.wait() [ 890.020488] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 890.020488] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] self[:] = self._gt.wait() [ 890.020488] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 890.020488] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] return self._exit_event.wait() [ 890.020488] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 890.020488] env[62096]: ERROR 
nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] current.throw(*self._exc) [ 890.022000] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 890.022000] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] result = function(*args, **kwargs) [ 890.022000] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 890.022000] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] return func(*args, **kwargs) [ 890.022000] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 890.022000] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] raise e [ 890.022000] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 890.022000] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] nwinfo = self.network_api.allocate_for_instance( [ 890.022000] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 890.022000] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] created_port_ids = self._update_ports_for_instance( [ 890.022000] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 890.022000] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] with excutils.save_and_reraise_exception(): [ 890.022000] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 890.022484] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] self.force_reraise() [ 890.022484] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 890.022484] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] raise self.value [ 890.022484] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 890.022484] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] updated_port = self._update_port( [ 890.022484] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 890.022484] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] _ensure_no_port_binding_failure(port) [ 890.022484] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
890.022484] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] raise exception.PortBindingFailed(port_id=port['id']) [ 890.022484] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] nova.exception.PortBindingFailed: Binding failed for port e927db6d-5558-4472-a05c-256404ce7a0e, please check neutron logs for more information. [ 890.022484] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] [ 890.022484] env[62096]: INFO nova.compute.manager [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] Terminating instance [ 890.024669] env[62096]: DEBUG oslo_concurrency.lockutils [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Acquiring lock "refresh_cache-ce80b353-2f7c-4165-b4fa-b81a1e539295" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 890.029462] env[62096]: DEBUG oslo_vmware.api [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]523dcc0a-f002-40f1-5fb5-a2ab457ab852, 'name': SearchDatastore_Task, 'duration_secs': 0.009043} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.029702] env[62096]: DEBUG oslo_concurrency.lockutils [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 890.029943] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Copying Virtual Disk [datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk to [datastore2] 5627f913-29d2-476e-8fde-8ea457cc56f2/5627f913-29d2-476e-8fde-8ea457cc56f2.vmdk {{(pid=62096) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 890.030185] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e8a623b9-949b-499f-9499-a7f84cb6c9bd {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.038019] env[62096]: DEBUG oslo_vmware.api [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Waiting for the task: (returnval){ [ 890.038019] env[62096]: value = "task-397452" [ 890.038019] env[62096]: _type = "Task" [ 890.038019] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.046154] env[62096]: DEBUG oslo_vmware.api [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Task: {'id': task-397452, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.162950] env[62096]: DEBUG nova.network.neutron [req-24d003b6-df5f-45cb-8bb0-26dca364569d req-c45b268b-b63b-40c8-a0ba-fa5b85942379 service nova] [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 890.221914] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.294s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 890.222649] env[62096]: DEBUG nova.compute.manager [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] Start building networks asynchronously for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 890.224494] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Expecting reply to msg 414a6af0b96e491c845a2a29917f578c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 890.226187] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 18.741s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 890.226489] env[62096]: DEBUG nova.objects.instance [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] [instance: a7838568-9d47-4306-8bb6-9ad74ab1feb3] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62096) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 890.228198] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Expecting reply to msg b8f80197155f4bde9f9206e49efb4993 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 890.240584] env[62096]: DEBUG nova.network.neutron [req-24d003b6-df5f-45cb-8bb0-26dca364569d req-c45b268b-b63b-40c8-a0ba-fa5b85942379 service nova] [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 890.241135] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-24d003b6-df5f-45cb-8bb0-26dca364569d req-c45b268b-b63b-40c8-a0ba-fa5b85942379 service nova] Expecting reply to msg 1f3179a0bf9f4994975b353d390b936a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 890.261753] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1f3179a0bf9f4994975b353d390b936a [ 890.262807] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
b8f80197155f4bde9f9206e49efb4993 [ 890.265618] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 414a6af0b96e491c845a2a29917f578c [ 890.361609] env[62096]: DEBUG oslo_concurrency.lockutils [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 890.548435] env[62096]: DEBUG oslo_vmware.api [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Task: {'id': task-397452, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.442083} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.548696] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Copied Virtual Disk [datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk to [datastore2] 5627f913-29d2-476e-8fde-8ea457cc56f2/5627f913-29d2-476e-8fde-8ea457cc56f2.vmdk {{(pid=62096) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 890.548907] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Extending root virtual disk to 1048576 {{(pid=62096) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 890.549160] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d35f3a51-872a-43f6-af06-637edeb50af1 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.555292] env[62096]: DEBUG oslo_vmware.api [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Waiting for the task: (returnval){ [ 890.555292] env[62096]: value = "task-397453" [ 890.555292] env[62096]: _type = "Task" [ 890.555292] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.562674] env[62096]: DEBUG oslo_vmware.api [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Task: {'id': task-397453, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.727926] env[62096]: DEBUG nova.compute.utils [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 890.728585] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Expecting reply to msg de8dfb71296746bda574fc3474877a71 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 890.729524] env[62096]: DEBUG nova.compute.manager [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 890.729695] env[62096]: DEBUG nova.network.neutron [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 890.734160] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Expecting reply to msg bc51e4e5204c42edbc8d778f7b4612e3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 890.743866] env[62096]: DEBUG oslo_concurrency.lockutils [req-24d003b6-df5f-45cb-8bb0-26dca364569d req-c45b268b-b63b-40c8-a0ba-fa5b85942379 service nova] Releasing lock "refresh_cache-ce80b353-2f7c-4165-b4fa-b81a1e539295" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 890.744590] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bc51e4e5204c42edbc8d778f7b4612e3 [ 890.745051] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg de8dfb71296746bda574fc3474877a71 [ 890.745407] env[62096]: DEBUG oslo_concurrency.lockutils [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Acquired lock "refresh_cache-ce80b353-2f7c-4165-b4fa-b81a1e539295" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.745586] env[62096]: DEBUG nova.network.neutron [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 890.745975] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Expecting reply to msg cbc3df933fac45489d90b75d7c6b0012 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 890.751258] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cbc3df933fac45489d90b75d7c6b0012 [ 
890.792647] env[62096]: DEBUG nova.policy [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3d896ad28c0a40b989d443e97c6e98ab', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '667e826df33c48dc913c130c5ecc3e0a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 891.065957] env[62096]: DEBUG oslo_vmware.api [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Task: {'id': task-397453, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07018} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.066562] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Extended root virtual disk {{(pid=62096) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 891.067527] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4aa28de-3ec6-49d0-aca4-d359c09f8496 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.097582] env[62096]: DEBUG nova.virt.vmwareapi.volumeops [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Reconfiguring VM instance instance-00000048 to attach disk [datastore2] 5627f913-29d2-476e-8fde-8ea457cc56f2/5627f913-29d2-476e-8fde-8ea457cc56f2.vmdk or device None with type sparse {{(pid=62096) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 891.098671] env[62096]: DEBUG nova.network.neutron [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] Successfully created port: 6c4e23f6-89de-4e27-b743-db4133b589d4 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 891.104055] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9028ece6-3a7e-44a6-9b7b-9f4057bffddf {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.127723] env[62096]: DEBUG oslo_vmware.api [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Waiting for the task: (returnval){ [ 891.127723] env[62096]: value = "task-397454" [ 891.127723] env[62096]: _type = "Task" [ 891.127723] env[62096]: } to complete. 
{{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.134295] env[62096]: DEBUG oslo_vmware.api [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Task: {'id': task-397454, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.233240] env[62096]: DEBUG nova.compute.manager [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] Start building block device mappings for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 891.235063] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Expecting reply to msg bd29d17572804cb99f969c7d4e35cba8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 891.236878] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 891.237198] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e0ec7c35-3ae1-4bda-b6d5-d68ea34f2cbc tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Expecting reply to msg cc17f6078bb343458187b39528b72921 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 891.237948] env[62096]: DEBUG oslo_concurrency.lockutils [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.434s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 891.239625] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg b428b4f8400d4e95891c50543ed4b00a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 891.253791] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cc17f6078bb343458187b39528b72921 [ 891.270502] env[62096]: DEBUG nova.network.neutron [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 891.293354] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bd29d17572804cb99f969c7d4e35cba8 [ 891.300815] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b428b4f8400d4e95891c50543ed4b00a [ 891.436581] env[62096]: DEBUG nova.network.neutron [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.436581] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Expecting reply to msg e9b53aafab5a45739c478c06fdb39fcf in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 891.444021] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e9b53aafab5a45739c478c06fdb39fcf [ 891.636481] env[62096]: DEBUG oslo_vmware.api [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Task: {'id': task-397454, 'name': ReconfigVM_Task, 'duration_secs': 0.262395} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.637001] env[62096]: DEBUG nova.virt.vmwareapi.volumeops [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Reconfigured VM instance instance-00000048 to attach disk [datastore2] 5627f913-29d2-476e-8fde-8ea457cc56f2/5627f913-29d2-476e-8fde-8ea457cc56f2.vmdk or device None with type sparse {{(pid=62096) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 891.637754] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9e67ab5f-dd25-4e13-a9c9-314f5e461f6e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.644384] env[62096]: DEBUG oslo_vmware.api [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Waiting for the task: (returnval){ [ 891.644384] env[62096]: value = "task-397455" [ 891.644384] env[62096]: _type = "Task" [ 891.644384] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.652846] env[62096]: DEBUG oslo_vmware.api [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Task: {'id': task-397455, 'name': Rename_Task} progress is 5%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.745414] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Expecting reply to msg d87ae43efc37428c897d6c6a2d7297bb in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 891.794832] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d87ae43efc37428c897d6c6a2d7297bb [ 891.860629] env[62096]: DEBUG nova.compute.manager [req-ad2e2991-c76f-43ef-b79d-7222391a5e2e req-5ecb2fee-d8a9-4077-bc31-5d7f28f34704 service nova] [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] Received event network-vif-deleted-e927db6d-5558-4472-a05c-256404ce7a0e {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 891.938577] env[62096]: DEBUG oslo_concurrency.lockutils [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Releasing lock "refresh_cache-ce80b353-2f7c-4165-b4fa-b81a1e539295" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 891.938993] env[62096]: DEBUG nova.compute.manager [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 891.939190] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 891.939477] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f7049e41-cd1b-425c-a9c3-7a9694bb292e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.948027] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b825f045-7676-472f-b80d-1b4657963f01 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.971729] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ce80b353-2f7c-4165-b4fa-b81a1e539295 could not be found. 
[ 891.971949] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 891.972201] env[62096]: INFO nova.compute.manager [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] Took 0.03 seconds to destroy the instance on the hypervisor. [ 891.972472] env[62096]: DEBUG oslo.service.loopingcall [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 891.974971] env[62096]: DEBUG nova.compute.manager [-] [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 891.975056] env[62096]: DEBUG nova.network.neutron [-] [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 891.996774] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2a7361a-0305-4a03-942c-9fb6218f72f4 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.999900] env[62096]: DEBUG nova.network.neutron [-] [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 892.000420] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 59be5d4c036040b09bcc2ce01d2a617c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 892.005922] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7007eb09-10d1-401f-b3c3-d6dfa5cfe0e6 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.010431] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 59be5d4c036040b09bcc2ce01d2a617c [ 892.055822] env[62096]: ERROR nova.compute.manager [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 6c4e23f6-89de-4e27-b743-db4133b589d4, please check neutron logs for more information. 
[ 892.055822] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 892.055822] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 892.055822] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 892.055822] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 892.055822] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 892.055822] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 892.055822] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 892.055822] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 892.055822] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 892.055822] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 892.055822] env[62096]: ERROR nova.compute.manager raise self.value [ 892.055822] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 892.055822] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 892.055822] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 892.055822] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 892.056365] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 892.056365] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 892.056365] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 6c4e23f6-89de-4e27-b743-db4133b589d4, please check neutron logs for more information. 
[ 892.056365] env[62096]: ERROR nova.compute.manager [ 892.056365] env[62096]: Traceback (most recent call last): [ 892.056365] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 892.056365] env[62096]: listener.cb(fileno) [ 892.056365] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 892.056365] env[62096]: result = function(*args, **kwargs) [ 892.056365] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 892.056365] env[62096]: return func(*args, **kwargs) [ 892.056365] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 892.056365] env[62096]: raise e [ 892.056365] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 892.056365] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 892.056365] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 892.056365] env[62096]: created_port_ids = self._update_ports_for_instance( [ 892.056365] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 892.056365] env[62096]: with excutils.save_and_reraise_exception(): [ 892.056365] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 892.056365] env[62096]: self.force_reraise() [ 892.056365] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 892.056365] env[62096]: raise self.value [ 892.056365] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 892.056365] env[62096]: updated_port = self._update_port( [ 892.056365] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 892.056365] env[62096]: _ensure_no_port_binding_failure(port) [ 892.056365] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 892.056365] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 892.057192] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 6c4e23f6-89de-4e27-b743-db4133b589d4, please check neutron logs for more information. 
[ 892.057192] env[62096]: Removing descriptor: 14 [ 892.057343] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c30f4e8-e94b-41e4-b982-5d1440a5f277 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.067266] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb1fa372-f382-49f8-ae66-5df55e913383 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.082646] env[62096]: DEBUG nova.compute.provider_tree [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 892.083187] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg f30fc0f2fcf843fb9356ae1ff7036c91 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 892.090501] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f30fc0f2fcf843fb9356ae1ff7036c91 [ 892.153355] env[62096]: DEBUG oslo_vmware.api [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Task: {'id': task-397455, 'name': Rename_Task, 'duration_secs': 0.13327} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.153698] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Powering on the VM {{(pid=62096) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 892.154011] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bce8177f-a4ea-44b6-9227-881dee09d071 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.159683] env[62096]: DEBUG oslo_vmware.api [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Waiting for the task: (returnval){ [ 892.159683] env[62096]: value = "task-397456" [ 892.159683] env[62096]: _type = "Task" [ 892.159683] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.167489] env[62096]: DEBUG oslo_vmware.api [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Task: {'id': task-397456, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.251608] env[62096]: DEBUG nova.compute.manager [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 892.278902] env[62096]: DEBUG nova.virt.hardware [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 892.279246] env[62096]: DEBUG nova.virt.hardware [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 892.279467] env[62096]: DEBUG nova.virt.hardware [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 892.279710] env[62096]: DEBUG nova.virt.hardware [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 892.280328] env[62096]: DEBUG nova.virt.hardware [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 892.280328] env[62096]: DEBUG nova.virt.hardware [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 892.280328] env[62096]: DEBUG nova.virt.hardware [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 892.280627] env[62096]: DEBUG nova.virt.hardware [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 892.280757] env[62096]: DEBUG nova.virt.hardware [None 
req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 892.280986] env[62096]: DEBUG nova.virt.hardware [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 892.281226] env[62096]: DEBUG nova.virt.hardware [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 892.282127] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbd66e80-e943-4a86-9d22-bcb52a67fe67 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.290376] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e86aa0a-4b86-439d-9fec-a8cff6873802 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.305040] env[62096]: ERROR nova.compute.manager [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 6c4e23f6-89de-4e27-b743-db4133b589d4, please check neutron logs for more information. 
[ 892.305040] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] Traceback (most recent call last): [ 892.305040] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 892.305040] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] yield resources [ 892.305040] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 892.305040] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] self.driver.spawn(context, instance, image_meta, [ 892.305040] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 892.305040] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] self._vmops.spawn(context, instance, image_meta, injected_files, [ 892.305040] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 892.305040] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] vm_ref = self.build_virtual_machine(instance, [ 892.305040] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 892.305443] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] vif_infos = vmwarevif.get_vif_info(self._session, [ 892.305443] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 892.305443] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] for vif in network_info: [ 892.305443] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 892.305443] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] return self._sync_wrapper(fn, *args, **kwargs) [ 892.305443] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 892.305443] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] self.wait() [ 892.305443] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 892.305443] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] self[:] = self._gt.wait() [ 892.305443] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 892.305443] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] return self._exit_event.wait() [ 892.305443] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 892.305443] env[62096]: ERROR 
nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] current.throw(*self._exc) [ 892.305801] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 892.305801] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] result = function(*args, **kwargs) [ 892.305801] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 892.305801] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] return func(*args, **kwargs) [ 892.305801] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 892.305801] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] raise e [ 892.305801] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 892.305801] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] nwinfo = self.network_api.allocate_for_instance( [ 892.305801] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 892.305801] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] created_port_ids = self._update_ports_for_instance( [ 892.305801] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 892.305801] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] with excutils.save_and_reraise_exception(): [ 892.305801] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 892.306192] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] self.force_reraise() [ 892.306192] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 892.306192] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] raise self.value [ 892.306192] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 892.306192] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] updated_port = self._update_port( [ 892.306192] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 892.306192] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] _ensure_no_port_binding_failure(port) [ 892.306192] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
892.306192] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] raise exception.PortBindingFailed(port_id=port['id']) [ 892.306192] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] nova.exception.PortBindingFailed: Binding failed for port 6c4e23f6-89de-4e27-b743-db4133b589d4, please check neutron logs for more information. [ 892.306192] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] [ 892.306192] env[62096]: INFO nova.compute.manager [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] Terminating instance [ 892.308393] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Acquiring lock "refresh_cache-f3f90842-edaa-42b0-9b21-25a952fc8288" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 892.308626] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Acquired lock "refresh_cache-f3f90842-edaa-42b0-9b21-25a952fc8288" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.308853] env[62096]: DEBUG nova.network.neutron [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 892.309318] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Expecting reply to msg 7dde0d0bda9540d1bb3ed56e46cdccdf in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 892.317311] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7dde0d0bda9540d1bb3ed56e46cdccdf [ 892.502266] env[62096]: DEBUG nova.network.neutron [-] [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.502701] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 9916123032ce4d60b50da4860697db71 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 892.511785] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9916123032ce4d60b50da4860697db71 [ 892.586503] env[62096]: DEBUG nova.scheduler.client.report [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 892.589356] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg ac1b6d87d6724c3c86fda94324beae51 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 892.601942] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac1b6d87d6724c3c86fda94324beae51 [ 892.669661] env[62096]: DEBUG oslo_vmware.api [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Task: {'id': task-397456, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.826196] env[62096]: DEBUG nova.network.neutron [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 892.962913] env[62096]: DEBUG nova.network.neutron [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.963442] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Expecting reply to msg 8d1d804db76f437ca9b482701116c37a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 892.975229] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8d1d804db76f437ca9b482701116c37a [ 893.005489] env[62096]: INFO nova.compute.manager [-] [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] Took 1.03 seconds to deallocate network for instance. 
[ 893.011499] env[62096]: DEBUG nova.compute.claims [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 893.011715] env[62096]: DEBUG oslo_concurrency.lockutils [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 893.091772] env[62096]: DEBUG oslo_concurrency.lockutils [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.854s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.092551] env[62096]: ERROR nova.compute.manager [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: aa3e06b6-89f8-425f-8950-30deb025a473] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c7c41772-9e80-4043-b62d-69cf4459ebc3, please check neutron logs for more information. [ 893.092551] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] Traceback (most recent call last): [ 893.092551] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 893.092551] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] self.driver.spawn(context, instance, image_meta, [ 893.092551] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 893.092551] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] self._vmops.spawn(context, instance, image_meta, injected_files, [ 893.092551] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 893.092551] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] vm_ref = self.build_virtual_machine(instance, [ 893.092551] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 893.092551] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] vif_infos = vmwarevif.get_vif_info(self._session, [ 893.092551] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 893.092874] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] for vif in network_info: [ 893.092874] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 893.092874] env[62096]: 
ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] return self._sync_wrapper(fn, *args, **kwargs) [ 893.092874] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 893.092874] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] self.wait() [ 893.092874] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 893.092874] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] self[:] = self._gt.wait() [ 893.092874] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 893.092874] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] return self._exit_event.wait() [ 893.092874] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 893.092874] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] current.throw(*self._exc) [ 893.092874] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 893.092874] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] result = function(*args, **kwargs) [ 893.093190] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 893.093190] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] return func(*args, **kwargs) [ 893.093190] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 893.093190] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] raise e [ 893.093190] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 893.093190] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] nwinfo = self.network_api.allocate_for_instance( [ 893.093190] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 893.093190] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] created_port_ids = self._update_ports_for_instance( [ 893.093190] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 893.093190] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] with excutils.save_and_reraise_exception(): [ 893.093190] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 893.093190] env[62096]: ERROR 
nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] self.force_reraise() [ 893.093190] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 893.093512] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] raise self.value [ 893.093512] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 893.093512] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] updated_port = self._update_port( [ 893.093512] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 893.093512] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] _ensure_no_port_binding_failure(port) [ 893.093512] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 893.093512] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] raise exception.PortBindingFailed(port_id=port['id']) [ 893.093512] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] nova.exception.PortBindingFailed: Binding failed for port c7c41772-9e80-4043-b62d-69cf4459ebc3, please check neutron logs for more information. [ 893.093512] env[62096]: ERROR nova.compute.manager [instance: aa3e06b6-89f8-425f-8950-30deb025a473] [ 893.093512] env[62096]: DEBUG nova.compute.utils [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: aa3e06b6-89f8-425f-8950-30deb025a473] Binding failed for port c7c41772-9e80-4043-b62d-69cf4459ebc3, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 893.095637] env[62096]: DEBUG nova.compute.manager [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: aa3e06b6-89f8-425f-8950-30deb025a473] Build of instance aa3e06b6-89f8-425f-8950-30deb025a473 was re-scheduled: Binding failed for port c7c41772-9e80-4043-b62d-69cf4459ebc3, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 893.096057] env[62096]: DEBUG nova.compute.manager [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: aa3e06b6-89f8-425f-8950-30deb025a473] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 893.096297] env[62096]: DEBUG oslo_concurrency.lockutils [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Acquiring lock "refresh_cache-aa3e06b6-89f8-425f-8950-30deb025a473" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 893.096444] env[62096]: DEBUG oslo_concurrency.lockutils [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Acquired lock "refresh_cache-aa3e06b6-89f8-425f-8950-30deb025a473" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.096599] env[62096]: DEBUG nova.network.neutron [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: aa3e06b6-89f8-425f-8950-30deb025a473] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 893.097006] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 747cdce604234298b3830246f339a7fe in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 893.098168] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 18.888s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 893.099224] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 5c437ac283404453b8f7a2a361d6c445 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 893.112690] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 747cdce604234298b3830246f339a7fe [ 893.133030] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5c437ac283404453b8f7a2a361d6c445 [ 893.182171] env[62096]: DEBUG oslo_vmware.api [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Task: {'id': task-397456, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.471288] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Releasing lock "refresh_cache-f3f90842-edaa-42b0-9b21-25a952fc8288" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 893.471288] env[62096]: DEBUG nova.compute.manager [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 893.471288] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 893.471288] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5184b0e5-c0fd-4ca3-b227-4d3b39a5c9a6 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.479240] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4721cfb3-4c41-4ac9-ac02-99a8775f765e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.502691] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f3f90842-edaa-42b0-9b21-25a952fc8288 could not be found. [ 893.502890] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 893.503069] env[62096]: INFO nova.compute.manager [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] Took 0.03 seconds to destroy the instance on the hypervisor. [ 893.503300] env[62096]: DEBUG oslo.service.loopingcall [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 893.503512] env[62096]: DEBUG nova.compute.manager [-] [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 893.503603] env[62096]: DEBUG nova.network.neutron [-] [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 893.517397] env[62096]: DEBUG nova.network.neutron [-] [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 893.517824] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg be0ec127c44342e98ce17ed2ae461ecb in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 893.524469] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg be0ec127c44342e98ce17ed2ae461ecb [ 893.608505] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 2363698fde234fa5a932af51cece3813 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 893.621399] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2363698fde234fa5a932af51cece3813 [ 893.629667] env[62096]: DEBUG nova.network.neutron [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: aa3e06b6-89f8-425f-8950-30deb025a473] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 893.673532] env[62096]: DEBUG oslo_vmware.api [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Task: {'id': task-397456, 'name': PowerOnVM_Task, 'duration_secs': 1.197506} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.673806] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Powered on the VM {{(pid=62096) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 893.674012] env[62096]: DEBUG nova.compute.manager [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Checking state {{(pid=62096) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 893.674802] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f33b3897-eaa3-44b6-b449-ee2345351eff {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.683500] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Expecting reply to msg c3c078ad55404425866024efe5fcb552 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 893.700420] env[62096]: DEBUG nova.network.neutron [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: aa3e06b6-89f8-425f-8950-30deb025a473] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.700904] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg d8fa0ae35d5941959417c920aa88bd7e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 893.709078] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d8fa0ae35d5941959417c920aa88bd7e [ 893.712669] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c3c078ad55404425866024efe5fcb552 [ 893.888346] env[62096]: DEBUG nova.compute.manager [req-80bfcd8a-b03f-4400-b26f-014b27dd8ad1 req-ead4be3b-d58f-4e51-be8e-b0f753a1b165 service nova] [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] Received event network-changed-6c4e23f6-89de-4e27-b743-db4133b589d4 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 893.888545] env[62096]: DEBUG nova.compute.manager [req-80bfcd8a-b03f-4400-b26f-014b27dd8ad1 req-ead4be3b-d58f-4e51-be8e-b0f753a1b165 service nova] [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] Refreshing instance network info cache due to event network-changed-6c4e23f6-89de-4e27-b743-db4133b589d4. 
{{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 893.888758] env[62096]: DEBUG oslo_concurrency.lockutils [req-80bfcd8a-b03f-4400-b26f-014b27dd8ad1 req-ead4be3b-d58f-4e51-be8e-b0f753a1b165 service nova] Acquiring lock "refresh_cache-f3f90842-edaa-42b0-9b21-25a952fc8288" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 893.888898] env[62096]: DEBUG oslo_concurrency.lockutils [req-80bfcd8a-b03f-4400-b26f-014b27dd8ad1 req-ead4be3b-d58f-4e51-be8e-b0f753a1b165 service nova] Acquired lock "refresh_cache-f3f90842-edaa-42b0-9b21-25a952fc8288" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.889056] env[62096]: DEBUG nova.network.neutron [req-80bfcd8a-b03f-4400-b26f-014b27dd8ad1 req-ead4be3b-d58f-4e51-be8e-b0f753a1b165 service nova] [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] Refreshing network info cache for port 6c4e23f6-89de-4e27-b743-db4133b589d4 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 893.889761] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-80bfcd8a-b03f-4400-b26f-014b27dd8ad1 req-ead4be3b-d58f-4e51-be8e-b0f753a1b165 service nova] Expecting reply to msg 4d7e229b98ed47248008b35377d43c29 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 893.896739] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4d7e229b98ed47248008b35377d43c29 [ 894.019740] env[62096]: DEBUG nova.network.neutron [-] [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 894.020251] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg b2484942a3f544f4b7d9856a83a3b55c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 894.029061] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b2484942a3f544f4b7d9856a83a3b55c [ 894.140030] env[62096]: WARNING nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance a7838568-9d47-4306-8bb6-9ad74ab1feb3 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
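Note: the resource-tracker entries here and further down show the three outcomes of the periodic allocation reconciliation (_remove_deleted_instances_allocations) against placement: allocations for actively managed instances are kept, instances that are scheduled but have not started are skipped, and allocations for instances this host does not know about produce the WARNING above and are also skipped. The sketch below is a rough paraphrase of that logged behaviour, not Nova's actual method; the tracked and scheduled_not_started sets and the allocations mapping are hypothetical inputs, and the real code handles more cases (e.g. deleting allocations of confirmed-deleted instances).

    # Simplified paraphrase of the reconciliation visible in this log.
    def reconcile_allocations(allocations, tracked, scheduled_not_started, log):
        for instance_uuid, resources in allocations.items():
            if instance_uuid in tracked:
                log.debug("Instance %s actively managed on this compute host and "
                          "has allocations in placement: %s.", instance_uuid, resources)
            elif instance_uuid in scheduled_not_started:
                log.debug("Instance %s has been scheduled to this compute host, but "
                          "has yet to start. Skipping heal of allocation: %s.",
                          instance_uuid, resources)
            else:
                log.warning("Instance %s is not being actively managed by this compute "
                            "host but has allocations referencing this compute host: %s. "
                            "Skipping heal of allocation because we do not know what to do.",
                            instance_uuid, resources)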
[ 894.140666] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg d76c240cac04488abc99ea823bc3b10e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 894.150953] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d76c240cac04488abc99ea823bc3b10e [ 894.195121] env[62096]: DEBUG oslo_concurrency.lockutils [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 894.209372] env[62096]: DEBUG oslo_concurrency.lockutils [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Releasing lock "refresh_cache-aa3e06b6-89f8-425f-8950-30deb025a473" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 894.209629] env[62096]: DEBUG nova.compute.manager [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 894.209845] env[62096]: DEBUG nova.compute.manager [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: aa3e06b6-89f8-425f-8950-30deb025a473] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 894.210272] env[62096]: DEBUG nova.network.neutron [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: aa3e06b6-89f8-425f-8950-30deb025a473] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 894.225301] env[62096]: DEBUG nova.network.neutron [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: aa3e06b6-89f8-425f-8950-30deb025a473] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 894.225894] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg e79511d10a4840e3a84f9d26450d143e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 894.241999] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e79511d10a4840e3a84f9d26450d143e [ 894.406313] env[62096]: DEBUG nova.network.neutron [req-80bfcd8a-b03f-4400-b26f-014b27dd8ad1 req-ead4be3b-d58f-4e51-be8e-b0f753a1b165 service nova] [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 894.473453] env[62096]: DEBUG nova.network.neutron [req-80bfcd8a-b03f-4400-b26f-014b27dd8ad1 req-ead4be3b-d58f-4e51-be8e-b0f753a1b165 service nova] [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 894.474006] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-80bfcd8a-b03f-4400-b26f-014b27dd8ad1 req-ead4be3b-d58f-4e51-be8e-b0f753a1b165 service nova] Expecting reply to msg 3122ed5c62fe4bf8b80ef46d069991aa in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 894.481996] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3122ed5c62fe4bf8b80ef46d069991aa [ 894.527183] env[62096]: INFO nova.compute.manager [-] [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] Took 1.02 seconds to deallocate network for instance. [ 894.529224] env[62096]: DEBUG nova.compute.claims [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 894.529405] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 894.648223] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance aa3e06b6-89f8-425f-8950-30deb025a473 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 894.648388] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 5627f913-29d2-476e-8fde-8ea457cc56f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 894.648513] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 894.648631] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 9aae3fde-f55a-4e90-a9b5-4594051183f4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 894.648744] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 60bc5e14-f495-48da-8db0-54d75b523822 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 894.648854] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance f832a621-c221-4ae8-928e-d6f9fa6b0586 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 894.648964] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance ce80b353-2f7c-4165-b4fa-b81a1e539295 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 894.649073] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance f3f90842-edaa-42b0-9b21-25a952fc8288 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 894.649629] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 7c2e3a849ccd4688921ede1d2141fbfd in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 894.659730] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c2e3a849ccd4688921ede1d2141fbfd [ 894.740075] env[62096]: DEBUG nova.network.neutron [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: aa3e06b6-89f8-425f-8950-30deb025a473] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 894.740611] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 7d0df06067cf4c8da6ead46d08a0ddeb in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 894.748777] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7d0df06067cf4c8da6ead46d08a0ddeb [ 894.802150] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Expecting reply to msg e24aa1ea792348e89a817302799396e6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 894.813618] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e24aa1ea792348e89a817302799396e6 [ 894.976512] env[62096]: DEBUG oslo_concurrency.lockutils [req-80bfcd8a-b03f-4400-b26f-014b27dd8ad1 req-ead4be3b-d58f-4e51-be8e-b0f753a1b165 service nova] Releasing lock 
"refresh_cache-f3f90842-edaa-42b0-9b21-25a952fc8288" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 894.976776] env[62096]: DEBUG nova.compute.manager [req-80bfcd8a-b03f-4400-b26f-014b27dd8ad1 req-ead4be3b-d58f-4e51-be8e-b0f753a1b165 service nova] [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] Received event network-vif-deleted-6c4e23f6-89de-4e27-b743-db4133b589d4 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 895.152015] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance e480be29-d25d-4ff2-8de8-26d6c4078ca9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 895.152574] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 6f9b90d0940343c09369ad34eb2cfebb in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 895.162714] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6f9b90d0940343c09369ad34eb2cfebb [ 895.242796] env[62096]: INFO nova.compute.manager [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: aa3e06b6-89f8-425f-8950-30deb025a473] Took 1.03 seconds to deallocate network for instance. [ 895.244587] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 8f5193f4314941a3820eb095327ed782 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 895.280783] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8f5193f4314941a3820eb095327ed782 [ 895.304104] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Acquiring lock "5627f913-29d2-476e-8fde-8ea457cc56f2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 895.304397] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Lock "5627f913-29d2-476e-8fde-8ea457cc56f2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 895.304611] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Acquiring lock "5627f913-29d2-476e-8fde-8ea457cc56f2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 895.304795] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 
tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Lock "5627f913-29d2-476e-8fde-8ea457cc56f2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 895.304961] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Lock "5627f913-29d2-476e-8fde-8ea457cc56f2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 895.313529] env[62096]: INFO nova.compute.manager [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Terminating instance [ 895.315196] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Acquiring lock "refresh_cache-5627f913-29d2-476e-8fde-8ea457cc56f2" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 895.315349] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Acquired lock "refresh_cache-5627f913-29d2-476e-8fde-8ea457cc56f2" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.315508] env[62096]: DEBUG nova.network.neutron [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 895.315896] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Expecting reply to msg b9e81b99af0f4f7f8ec603d0362bf58a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 895.322120] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b9e81b99af0f4f7f8ec603d0362bf58a [ 895.655035] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 1c02c726-ab23-49e5-8d55-b222c712225a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 895.655612] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg e79deda05dbf49428d0be83b00ddb48e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 895.666880] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e79deda05dbf49428d0be83b00ddb48e [ 895.749811] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 59511abf19df49018a79b6fc1c7880d3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 895.779510] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 59511abf19df49018a79b6fc1c7880d3 [ 895.831999] env[62096]: DEBUG nova.network.neutron [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 895.885833] env[62096]: DEBUG nova.network.neutron [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.886413] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Expecting reply to msg 4213fe858bf646b5b725c6d0e4bc57b7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 895.895664] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4213fe858bf646b5b725c6d0e4bc57b7 [ 896.157791] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 8802af61-3692-4e27-85f4-945afccff231 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 896.158448] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg d415b38f6fa3488187b775fb5315497f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 896.168941] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d415b38f6fa3488187b775fb5315497f [ 896.269722] env[62096]: INFO nova.scheduler.client.report [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Deleted allocations for instance aa3e06b6-89f8-425f-8950-30deb025a473 [ 896.275547] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 20f56e4f76e641af8d01a05521e48f8b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 896.285001] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 20f56e4f76e641af8d01a05521e48f8b [ 896.389213] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Releasing lock "refresh_cache-5627f913-29d2-476e-8fde-8ea457cc56f2" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 896.389638] env[62096]: DEBUG nova.compute.manager [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 896.389834] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 896.390813] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f1032e2-535e-49e1-9fc8-7dd29a8ca686 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.398845] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Powering off the VM {{(pid=62096) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 896.399062] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a970be5e-bebe-4795-afa3-80e88049078c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.405142] env[62096]: DEBUG oslo_vmware.api [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Waiting for the task: (returnval){ [ 896.405142] env[62096]: value = "task-397457" [ 896.405142] env[62096]: _type = "Task" [ 896.405142] env[62096]: } to complete. 
{{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.413304] env[62096]: DEBUG oslo_vmware.api [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Task: {'id': task-397457, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.660758] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 0f275603-acfc-43db-8a71-a17af8e837b4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 896.661023] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=62096) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 896.661171] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1856MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=62096) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 896.776962] env[62096]: DEBUG oslo_concurrency.lockutils [None req-bb5902e0-898e-4848-be60-0804ae5f57a5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Lock "aa3e06b6-89f8-425f-8950-30deb025a473" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 122.297s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.777796] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg f865ffda64424466861f1108a73d9c80 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 896.789844] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f865ffda64424466861f1108a73d9c80 [ 896.811612] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-838ace10-03d4-4d4e-8883-5b020c4faece {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.820590] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba74d115-7b12-4d86-80e9-e635358bd0eb {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.851514] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d08b0dd6-d36d-42ba-b401-8bbcf8fc3917 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.859030] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1d44355-2d1f-44ca-8d86-879b1bcb3ed3 {{(pid=62096) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.872487] env[62096]: DEBUG nova.compute.provider_tree [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 896.873000] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 16b636e5dbe844278ec08c3dd2bbef23 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 896.880050] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 16b636e5dbe844278ec08c3dd2bbef23 [ 896.914841] env[62096]: DEBUG oslo_vmware.api [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Task: {'id': task-397457, 'name': PowerOffVM_Task, 'duration_secs': 0.187827} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.915121] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Powered off the VM {{(pid=62096) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 896.915255] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Unregistering the VM {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 896.915493] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-37794137-7183-416a-99cc-9eb447f32304 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.939484] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Unregistered the VM {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 896.939716] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Deleting contents of the VM from datastore datastore2 {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 896.939897] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Deleting the datastore file [datastore2] 5627f913-29d2-476e-8fde-8ea457cc56f2 {{(pid=62096) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 896.940208] env[62096]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eabbb901-5846-4bcd-86f9-a1d2b0a9a665 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.946180] env[62096]: DEBUG oslo_vmware.api [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 
tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Waiting for the task: (returnval){ [ 896.946180] env[62096]: value = "task-397459" [ 896.946180] env[62096]: _type = "Task" [ 896.946180] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.956813] env[62096]: DEBUG oslo_vmware.api [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Task: {'id': task-397459, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.280253] env[62096]: DEBUG nova.compute.manager [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 897.281956] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg b54b9d13d2bd46b1a6e794ed9f4952d5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 897.314599] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b54b9d13d2bd46b1a6e794ed9f4952d5 [ 897.376304] env[62096]: DEBUG nova.scheduler.client.report [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 897.379075] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 1474208bb7cd4fdab2511e1cef54bd15 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 897.389833] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1474208bb7cd4fdab2511e1cef54bd15 [ 897.456133] env[62096]: DEBUG oslo_vmware.api [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Task: {'id': task-397459, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.084619} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.456566] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Deleted the datastore file {{(pid=62096) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 897.456901] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Deleted contents of the VM from datastore datastore2 {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 897.457216] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 897.457599] env[62096]: INFO nova.compute.manager [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Took 1.07 seconds to destroy the instance on the hypervisor. [ 897.457969] env[62096]: DEBUG oslo.service.loopingcall [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 897.458278] env[62096]: DEBUG nova.compute.manager [-] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 897.458483] env[62096]: DEBUG nova.network.neutron [-] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 897.473127] env[62096]: DEBUG nova.network.neutron [-] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 897.473552] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg cea94baec42c41bdb770e98ba443f2a0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 897.480404] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cea94baec42c41bdb770e98ba443f2a0 [ 897.678491] env[62096]: DEBUG oslo_concurrency.lockutils [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Acquiring lock "2d42e5f7-b293-4133-b279-63a8320bc19d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.678725] env[62096]: DEBUG oslo_concurrency.lockutils [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Lock "2d42e5f7-b293-4133-b279-63a8320bc19d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 897.802327] env[62096]: DEBUG oslo_concurrency.lockutils [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.883638] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62096) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 897.883922] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.786s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.884265] env[62096]: DEBUG oslo_concurrency.lockutils [None req-787cee2b-5948-46d6-9396-dac712351d47 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.760s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 897.884452] env[62096]: DEBUG oslo_concurrency.lockutils [None req-787cee2b-5948-46d6-9396-dac712351d47 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.886503] env[62096]: DEBUG oslo_concurrency.lockutils [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 20.755s 
{{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 897.888514] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Expecting reply to msg c698723404e14557aecf745c9baa68ff in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 897.909550] env[62096]: INFO nova.scheduler.client.report [None req-787cee2b-5948-46d6-9396-dac712351d47 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Deleted allocations for instance a7838568-9d47-4306-8bb6-9ad74ab1feb3 [ 897.912130] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-787cee2b-5948-46d6-9396-dac712351d47 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Expecting reply to msg e34094e25f9a44d281c77f6331f660d7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 897.924234] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c698723404e14557aecf745c9baa68ff [ 897.965524] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e34094e25f9a44d281c77f6331f660d7 [ 897.975813] env[62096]: DEBUG nova.network.neutron [-] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 897.976282] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 7a9b65a114834b2d994ca5694fa6e7bf in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 897.984153] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7a9b65a114834b2d994ca5694fa6e7bf [ 898.417722] env[62096]: DEBUG oslo_concurrency.lockutils [None req-787cee2b-5948-46d6-9396-dac712351d47 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Lock "a7838568-9d47-4306-8bb6-9ad74ab1feb3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.999s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 898.418063] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-787cee2b-5948-46d6-9396-dac712351d47 tempest-ServerShowV254Test-1158686086 tempest-ServerShowV254Test-1158686086-project-member] Expecting reply to msg 7e3e7e6641da439e8234437b8cfd877c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 898.428746] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7e3e7e6641da439e8234437b8cfd877c [ 898.478260] env[62096]: INFO nova.compute.manager [-] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Took 1.02 seconds to deallocate network for instance. 
[ 898.482021] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Expecting reply to msg 0c2276b9f8ee4fc78222f0027d09a166 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 898.508823] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c2276b9f8ee4fc78222f0027d09a166 [ 898.538095] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7a78e55-97d7-4bae-8d60-2c53395c8627 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.545920] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9ceff9b-63f3-4d53-aaee-8400519dbe62 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.576856] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1518fb48-6215-48c3-a149-6b815927aa9d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.584929] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dd5a07a-0c3d-416b-bdb9-eb1a4eef8c2e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.598117] env[62096]: DEBUG nova.compute.provider_tree [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 898.598814] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Expecting reply to msg 942e2e9aa11741da9a273b21483320b1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 898.607705] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 942e2e9aa11741da9a273b21483320b1 [ 898.984435] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 899.117591] env[62096]: ERROR nova.scheduler.client.report [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] [req-ee939e89-c73b-42ea-b7af-97671682defd] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 
4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 6eefe13c-ab55-4c03-987f-47a62756c3b3. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ee939e89-c73b-42ea-b7af-97671682defd"}]}: nova.exception.PortBindingFailed: Binding failed for port 4b8eb4ba-d577-4e7a-8576-502e273b223b, please check neutron logs for more information. [ 899.133003] env[62096]: DEBUG nova.scheduler.client.report [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Refreshing inventories for resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 899.145620] env[62096]: DEBUG nova.scheduler.client.report [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Updating ProviderTree inventory for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 899.145832] env[62096]: DEBUG nova.compute.provider_tree [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 899.156404] env[62096]: DEBUG nova.scheduler.client.report [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Refreshing aggregate associations for resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3, aggregates: None {{(pid=62096) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 899.173590] env[62096]: DEBUG nova.scheduler.client.report [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Refreshing trait associations for resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62096) 
_refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 899.314056] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2507fd2c-8065-4a3b-adc0-44e9b3d7e3be {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.321487] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d40c3d4-325c-4376-8e7c-126b265ce1b8 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.352088] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90c5d1dc-16a9-4aa1-bc10-2f522b888fc3 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.359216] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ea4abdf-6983-4508-b525-df129d75cd44 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.371761] env[62096]: DEBUG nova.compute.provider_tree [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 899.372382] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Expecting reply to msg a2644844732f408f8d48f95550fed469 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 899.379703] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a2644844732f408f8d48f95550fed469 [ 899.906482] env[62096]: DEBUG nova.scheduler.client.report [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Updated inventory for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with generation 99 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:957}} [ 899.906782] env[62096]: DEBUG nova.compute.provider_tree [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Updating resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 generation from 99 to 100 during operation: update_inventory {{(pid=62096) 
_update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 899.906933] env[62096]: DEBUG nova.compute.provider_tree [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 899.909449] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Expecting reply to msg e75e78b33cc24df8b6b1119526e99bf6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 899.921074] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e75e78b33cc24df8b6b1119526e99bf6 [ 900.412201] env[62096]: DEBUG oslo_concurrency.lockutils [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.525s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 900.412373] env[62096]: ERROR nova.compute.manager [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 4b8eb4ba-d577-4e7a-8576-502e273b223b, please check neutron logs for more information. 
[ 900.412373] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] Traceback (most recent call last): [ 900.412373] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 900.412373] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] self.driver.spawn(context, instance, image_meta, [ 900.412373] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 900.412373] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] self._vmops.spawn(context, instance, image_meta, injected_files, [ 900.412373] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 900.412373] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] vm_ref = self.build_virtual_machine(instance, [ 900.412373] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 900.412373] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] vif_infos = vmwarevif.get_vif_info(self._session, [ 900.412373] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 900.412705] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] for vif in network_info: [ 900.412705] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 900.412705] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] return self._sync_wrapper(fn, *args, **kwargs) [ 900.412705] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 900.412705] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] self.wait() [ 900.412705] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 900.412705] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] self[:] = self._gt.wait() [ 900.412705] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 900.412705] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] return self._exit_event.wait() [ 900.412705] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 900.412705] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] current.throw(*self._exc) [ 900.412705] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
900.412705] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] result = function(*args, **kwargs) [ 900.413043] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 900.413043] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] return func(*args, **kwargs) [ 900.413043] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 900.413043] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] raise e [ 900.413043] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 900.413043] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] nwinfo = self.network_api.allocate_for_instance( [ 900.413043] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 900.413043] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] created_port_ids = self._update_ports_for_instance( [ 900.413043] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 900.413043] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] with excutils.save_and_reraise_exception(): [ 900.413043] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 900.413043] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] self.force_reraise() [ 900.413043] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 900.413398] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] raise self.value [ 900.413398] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 900.413398] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] updated_port = self._update_port( [ 900.413398] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 900.413398] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] _ensure_no_port_binding_failure(port) [ 900.413398] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 900.413398] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] raise exception.PortBindingFailed(port_id=port['id']) [ 900.413398] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] nova.exception.PortBindingFailed: Binding failed for 
port 4b8eb4ba-d577-4e7a-8576-502e273b223b, please check neutron logs for more information. [ 900.413398] env[62096]: ERROR nova.compute.manager [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] [ 900.413398] env[62096]: DEBUG nova.compute.utils [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] Binding failed for port 4b8eb4ba-d577-4e7a-8576-502e273b223b, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 900.414918] env[62096]: DEBUG oslo_concurrency.lockutils [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.845s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 900.416364] env[62096]: INFO nova.compute.claims [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 900.417928] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Expecting reply to msg 62ce8d8e813f4768b8708cdffe8c4772 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 900.419262] env[62096]: DEBUG nova.compute.manager [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] Build of instance 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78 was re-scheduled: Binding failed for port 4b8eb4ba-d577-4e7a-8576-502e273b223b, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 900.419729] env[62096]: DEBUG nova.compute.manager [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 900.419963] env[62096]: DEBUG oslo_concurrency.lockutils [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Acquiring lock "refresh_cache-9b85f3a6-892f-4eb4-b0d8-aacaf6576c78" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 900.420123] env[62096]: DEBUG oslo_concurrency.lockutils [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Acquired lock "refresh_cache-9b85f3a6-892f-4eb4-b0d8-aacaf6576c78" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.420284] env[62096]: DEBUG nova.network.neutron [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 900.420666] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Expecting reply to msg 9723307e782a4f9cbc28d68764ffb0f6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 900.433669] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9723307e782a4f9cbc28d68764ffb0f6 [ 900.454623] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 62ce8d8e813f4768b8708cdffe8c4772 [ 900.923796] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Expecting reply to msg 5d9aa5ae6cf8422fb4419b75e99f8943 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 900.931727] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5d9aa5ae6cf8422fb4419b75e99f8943 [ 900.939704] env[62096]: DEBUG nova.network.neutron [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 901.027744] env[62096]: DEBUG nova.network.neutron [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 901.028306] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Expecting reply to msg 8d9e121bfbe74acea491c3a9ee84b60f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 901.036851] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8d9e121bfbe74acea491c3a9ee84b60f [ 901.531606] env[62096]: DEBUG oslo_concurrency.lockutils [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Releasing lock "refresh_cache-9b85f3a6-892f-4eb4-b0d8-aacaf6576c78" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 901.531913] env[62096]: DEBUG nova.compute.manager [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 901.532127] env[62096]: DEBUG nova.compute.manager [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 901.532304] env[62096]: DEBUG nova.network.neutron [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 901.546451] env[62096]: DEBUG nova.network.neutron [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 901.546994] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Expecting reply to msg 81c6a69a2f814e62aa934d1f2bb29d45 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 901.554642] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 81c6a69a2f814e62aa934d1f2bb29d45 [ 901.585745] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-992faf32-0428-4e59-950c-b6cd8d99984b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.593521] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07709cc6-0fa5-434d-b779-27118ef91006 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.623698] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4546d9b-e281-4e2d-a9f1-803ea932ad5a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.632037] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5bdcf1e-3390-4ce1-9cac-0d34a5392da1 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.644774] env[62096]: DEBUG nova.compute.provider_tree [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 901.645308] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Expecting reply to msg d5089bb37ecb4ff9b9ac18d8c45b192a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 901.657158] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d5089bb37ecb4ff9b9ac18d8c45b192a [ 902.048830] env[62096]: DEBUG nova.network.neutron [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.049332] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Expecting reply to msg 91015534f00f4f5a99f8ac3cf27552b9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 902.057690] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 91015534f00f4f5a99f8ac3cf27552b9 [ 902.151987] env[62096]: DEBUG nova.scheduler.client.report [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Inventory has not changed for 
provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 902.154316] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Expecting reply to msg 71484512dc8e4262a8a4e928392324c5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 902.168468] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 71484512dc8e4262a8a4e928392324c5 [ 902.552398] env[62096]: INFO nova.compute.manager [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] [instance: 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78] Took 1.02 seconds to deallocate network for instance. [ 902.554440] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Expecting reply to msg 8d290194a6f940d4bf1ebf440bd6e13c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 902.585401] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8d290194a6f940d4bf1ebf440bd6e13c [ 902.656815] env[62096]: DEBUG oslo_concurrency.lockutils [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.242s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 902.657588] env[62096]: DEBUG nova.compute.manager [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 902.659359] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Expecting reply to msg 24179cf65445403b98c5ba30eeb2adcd in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 902.660480] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.831s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 902.662652] env[62096]: INFO nova.compute.claims [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 902.664364] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg d8fec3c8f8f04df9b2f9946e67f9e037 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 902.688706] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 24179cf65445403b98c5ba30eeb2adcd [ 902.698500] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d8fec3c8f8f04df9b2f9946e67f9e037 [ 903.059539] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Expecting reply to msg 7d2536afe9c84025b5d45567dad981c9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 903.089801] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7d2536afe9c84025b5d45567dad981c9 [ 903.169386] env[62096]: DEBUG nova.compute.utils [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 903.169988] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Expecting reply to msg 39a5255896b94a03b697804e187d0e2d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 903.172045] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg 21c182d4040d4211a2ef9afb7f8dd431 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 903.174363] env[62096]: DEBUG nova.compute.manager [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 903.174363] env[62096]: DEBUG nova.network.neutron [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 903.178551] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 21c182d4040d4211a2ef9afb7f8dd431 [ 903.179322] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 39a5255896b94a03b697804e187d0e2d [ 903.211209] env[62096]: DEBUG nova.policy [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2d100073e3ad41fc9e148e88cbe4df83', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4e095225e2504807972b7b0060659ab4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 903.466423] env[62096]: DEBUG nova.network.neutron [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] Successfully created port: 843c145b-efae-4f01-a279-a36832055d07 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 903.581376] env[62096]: INFO nova.scheduler.client.report [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Deleted allocations for instance 9b85f3a6-892f-4eb4-b0d8-aacaf6576c78 [ 903.590349] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Expecting reply to msg f11833bbef5d4a669c0ba8dc525c88a6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 903.611192] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f11833bbef5d4a669c0ba8dc525c88a6 [ 903.674344] env[62096]: DEBUG nova.compute.manager [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 903.676766] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Expecting reply to msg d91b3701bf4b496d8653752ef00d2d48 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 903.716139] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d91b3701bf4b496d8653752ef00d2d48 [ 903.837924] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b8ddc83-f421-4a30-b363-0a8f87b14a6a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.846129] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9ecfda9-ffeb-45bc-babe-e6a7b763579f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.883758] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbbe57f9-a58a-4a5e-acba-2abe814ab0e3 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.892314] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-637b3fd6-9f3f-4821-884c-9c02e886530f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.905795] env[62096]: DEBUG nova.compute.provider_tree [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 903.906338] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg a6fbe245ce9a416eaec5c6e47f683482 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 903.913423] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a6fbe245ce9a416eaec5c6e47f683482 [ 904.096712] env[62096]: DEBUG oslo_concurrency.lockutils [None req-3fd8e1c2-0ecc-40e5-90b0-01e465c5f366 tempest-ServersV294TestFqdnHostnames-2113678617 tempest-ServersV294TestFqdnHostnames-2113678617-project-member] Lock "9b85f3a6-892f-4eb4-b0d8-aacaf6576c78" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 97.210s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 904.097262] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 9b7c414ba1384b5598e76ebd4b567312 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 904.106742] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9b7c414ba1384b5598e76ebd4b567312 [ 904.117569] env[62096]: DEBUG nova.compute.manager [req-d5e08660-e208-48b0-8603-11abc7aaa081 req-b706e9df-fd5d-44f8-b75b-aed21b563796 service nova] [instance: 
e480be29-d25d-4ff2-8de8-26d6c4078ca9] Received event network-changed-843c145b-efae-4f01-a279-a36832055d07 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 904.117772] env[62096]: DEBUG nova.compute.manager [req-d5e08660-e208-48b0-8603-11abc7aaa081 req-b706e9df-fd5d-44f8-b75b-aed21b563796 service nova] [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] Refreshing instance network info cache due to event network-changed-843c145b-efae-4f01-a279-a36832055d07. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 904.117984] env[62096]: DEBUG oslo_concurrency.lockutils [req-d5e08660-e208-48b0-8603-11abc7aaa081 req-b706e9df-fd5d-44f8-b75b-aed21b563796 service nova] Acquiring lock "refresh_cache-e480be29-d25d-4ff2-8de8-26d6c4078ca9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 904.118125] env[62096]: DEBUG oslo_concurrency.lockutils [req-d5e08660-e208-48b0-8603-11abc7aaa081 req-b706e9df-fd5d-44f8-b75b-aed21b563796 service nova] Acquired lock "refresh_cache-e480be29-d25d-4ff2-8de8-26d6c4078ca9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 904.118290] env[62096]: DEBUG nova.network.neutron [req-d5e08660-e208-48b0-8603-11abc7aaa081 req-b706e9df-fd5d-44f8-b75b-aed21b563796 service nova] [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] Refreshing network info cache for port 843c145b-efae-4f01-a279-a36832055d07 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 904.119057] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-d5e08660-e208-48b0-8603-11abc7aaa081 req-b706e9df-fd5d-44f8-b75b-aed21b563796 service nova] Expecting reply to msg a9963b3882a74dd5bb18d3d8de1ccd3d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 904.125871] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a9963b3882a74dd5bb18d3d8de1ccd3d [ 904.184634] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Expecting reply to msg 23b963f41c6b4f7c9e00babbb7910dd4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 904.219932] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 23b963f41c6b4f7c9e00babbb7910dd4 [ 904.297339] env[62096]: ERROR nova.compute.manager [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 843c145b-efae-4f01-a279-a36832055d07, please check neutron logs for more information. 
[ 904.297339] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 904.297339] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 904.297339] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 904.297339] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 904.297339] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 904.297339] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 904.297339] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 904.297339] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 904.297339] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 904.297339] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 904.297339] env[62096]: ERROR nova.compute.manager raise self.value [ 904.297339] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 904.297339] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 904.297339] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 904.297339] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 904.297808] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 904.297808] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 904.297808] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 843c145b-efae-4f01-a279-a36832055d07, please check neutron logs for more information. 
[ 904.297808] env[62096]: ERROR nova.compute.manager [ 904.297808] env[62096]: Traceback (most recent call last): [ 904.297808] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 904.297808] env[62096]: listener.cb(fileno) [ 904.297808] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 904.297808] env[62096]: result = function(*args, **kwargs) [ 904.297808] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 904.297808] env[62096]: return func(*args, **kwargs) [ 904.297808] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 904.297808] env[62096]: raise e [ 904.297808] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 904.297808] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 904.297808] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 904.297808] env[62096]: created_port_ids = self._update_ports_for_instance( [ 904.297808] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 904.297808] env[62096]: with excutils.save_and_reraise_exception(): [ 904.297808] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 904.297808] env[62096]: self.force_reraise() [ 904.297808] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 904.297808] env[62096]: raise self.value [ 904.297808] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 904.297808] env[62096]: updated_port = self._update_port( [ 904.297808] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 904.297808] env[62096]: _ensure_no_port_binding_failure(port) [ 904.297808] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 904.297808] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 904.298754] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 843c145b-efae-4f01-a279-a36832055d07, please check neutron logs for more information. 
[ 904.298754] env[62096]: Removing descriptor: 14 [ 904.409347] env[62096]: DEBUG nova.scheduler.client.report [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 904.411620] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg 6091b0d520054b5cb3728f817d4f642d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 904.422844] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6091b0d520054b5cb3728f817d4f642d [ 904.599639] env[62096]: DEBUG nova.compute.manager [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 904.601547] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg c2e7f32bfed546d391829f9714225f5e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 904.634506] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c2e7f32bfed546d391829f9714225f5e [ 904.635747] env[62096]: DEBUG nova.network.neutron [req-d5e08660-e208-48b0-8603-11abc7aaa081 req-b706e9df-fd5d-44f8-b75b-aed21b563796 service nova] [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 904.687782] env[62096]: DEBUG nova.compute.manager [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 904.706846] env[62096]: DEBUG nova.network.neutron [req-d5e08660-e208-48b0-8603-11abc7aaa081 req-b706e9df-fd5d-44f8-b75b-aed21b563796 service nova] [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 904.707211] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-d5e08660-e208-48b0-8603-11abc7aaa081 req-b706e9df-fd5d-44f8-b75b-aed21b563796 service nova] Expecting reply to msg b4f85fa0b1a8415595a161aa5d116ff6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 904.711330] env[62096]: DEBUG nova.virt.hardware [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 904.711545] env[62096]: DEBUG nova.virt.hardware [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 904.711691] env[62096]: DEBUG nova.virt.hardware [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 904.711871] env[62096]: DEBUG nova.virt.hardware [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 904.712020] env[62096]: DEBUG nova.virt.hardware [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 904.712174] env[62096]: DEBUG nova.virt.hardware [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 904.712372] env[62096]: DEBUG nova.virt.hardware [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 904.712524] env[62096]: DEBUG nova.virt.hardware [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 904.712682] env[62096]: DEBUG nova.virt.hardware [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 904.712835] env[62096]: DEBUG nova.virt.hardware [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 904.713000] env[62096]: DEBUG nova.virt.hardware [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 904.713870] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c55b3e99-eeab-40ff-baeb-d7c776268ad0 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.717019] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4f85fa0b1a8415595a161aa5d116ff6 [ 904.722447] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fea5543-4093-4787-aff0-f4c682097273 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.735801] env[62096]: ERROR nova.compute.manager [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 843c145b-efae-4f01-a279-a36832055d07, please check neutron logs for more information. 
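The ERROR record above reports nova.exception.PortBindingFailed for port 843c145b-efae-4f01-a279-a36832055d07 while spawning instance e480be29-d25d-4ff2-8de8-26d6c4078ca9, and the per-instance traceback that follows shows the failure only surfacing once get_vif_info iterates the deferred network_info. A rough, self-contained sketch of the kind of check raising this exception at nova/network/neutron.py:294 is below; the 'binding:vif_type' field and the local stand-in exception are illustrative assumptions, not the verbatim Nova source.

    # Illustrative approximation only: mimics the port-binding check whose frame
    # (_ensure_no_port_binding_failure, neutron.py:294) appears in the traceback.
    # Field names and the stand-in exception class are assumptions for this sketch.

    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed."""

        def __init__(self, port_id):
            super().__init__(f"Binding failed for port {port_id}, "
                             "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port):
        # Neutron records the binding outcome on the port; 'binding_failed' means
        # no mechanism driver could bind it on this host, so the spawn must be
        # aborted and the resource claim rolled back.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port['id'])

    try:
        ensure_no_port_binding_failure(
            {'id': '843c145b-efae-4f01-a279-a36832055d07',
             'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)  # same message text as the ERROR record above

Because the allocation runs in a separate greenthread, the error is not raised where the port is updated; it re-surfaces when the spawn path waits on the network info, which is why the build gets as far as collecting VIF info before aborting.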
[ 904.735801] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] Traceback (most recent call last): [ 904.735801] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 904.735801] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] yield resources [ 904.735801] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 904.735801] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] self.driver.spawn(context, instance, image_meta, [ 904.735801] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 904.735801] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 904.735801] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 904.735801] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] vm_ref = self.build_virtual_machine(instance, [ 904.735801] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 904.736153] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] vif_infos = vmwarevif.get_vif_info(self._session, [ 904.736153] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 904.736153] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] for vif in network_info: [ 904.736153] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 904.736153] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] return self._sync_wrapper(fn, *args, **kwargs) [ 904.736153] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 904.736153] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] self.wait() [ 904.736153] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 904.736153] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] self[:] = self._gt.wait() [ 904.736153] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 904.736153] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] return self._exit_event.wait() [ 904.736153] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 904.736153] env[62096]: ERROR 
nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] current.throw(*self._exc) [ 904.736477] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 904.736477] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] result = function(*args, **kwargs) [ 904.736477] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 904.736477] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] return func(*args, **kwargs) [ 904.736477] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 904.736477] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] raise e [ 904.736477] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 904.736477] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] nwinfo = self.network_api.allocate_for_instance( [ 904.736477] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 904.736477] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] created_port_ids = self._update_ports_for_instance( [ 904.736477] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 904.736477] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] with excutils.save_and_reraise_exception(): [ 904.736477] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 904.736789] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] self.force_reraise() [ 904.736789] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 904.736789] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] raise self.value [ 904.736789] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 904.736789] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] updated_port = self._update_port( [ 904.736789] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 904.736789] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] _ensure_no_port_binding_failure(port) [ 904.736789] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
904.736789] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] raise exception.PortBindingFailed(port_id=port['id']) [ 904.736789] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] nova.exception.PortBindingFailed: Binding failed for port 843c145b-efae-4f01-a279-a36832055d07, please check neutron logs for more information. [ 904.736789] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] [ 904.736789] env[62096]: INFO nova.compute.manager [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] Terminating instance [ 904.737873] env[62096]: DEBUG oslo_concurrency.lockutils [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Acquiring lock "refresh_cache-e480be29-d25d-4ff2-8de8-26d6c4078ca9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 904.914235] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.254s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 904.914727] env[62096]: DEBUG nova.compute.manager [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 904.916533] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg a3ae283e9b994064b08ad782227ecb3c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 904.917594] env[62096]: DEBUG oslo_concurrency.lockutils [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 22.575s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 904.919263] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 159d071e66d84089bfd66b14a9ae00b9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 904.948671] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a3ae283e9b994064b08ad782227ecb3c [ 904.970213] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 159d071e66d84089bfd66b14a9ae00b9 [ 905.121762] env[62096]: DEBUG oslo_concurrency.lockutils [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 905.209275] env[62096]: DEBUG oslo_concurrency.lockutils [req-d5e08660-e208-48b0-8603-11abc7aaa081 req-b706e9df-fd5d-44f8-b75b-aed21b563796 service nova] Releasing lock "refresh_cache-e480be29-d25d-4ff2-8de8-26d6c4078ca9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 905.209783] env[62096]: DEBUG oslo_concurrency.lockutils [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Acquired lock "refresh_cache-e480be29-d25d-4ff2-8de8-26d6c4078ca9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 905.210050] env[62096]: DEBUG nova.network.neutron [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 905.210434] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Expecting reply to msg 875590faddb849e4a78bdceab14e3721 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 905.217711] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 875590faddb849e4a78bdceab14e3721 [ 905.422377] env[62096]: DEBUG nova.compute.utils [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Using /dev/sd instead of None 
{{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 905.422975] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg 10449198349244898d9c5d1f9a14d986 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 905.423944] env[62096]: DEBUG nova.compute.manager [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 905.424157] env[62096]: DEBUG nova.network.neutron [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 905.433545] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 10449198349244898d9c5d1f9a14d986 [ 905.465211] env[62096]: DEBUG nova.policy [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '480141f764ae4387a7740719160c9ddd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b8386168ae3843e58ae487f59e81fc4f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 905.622512] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1caa633-0eb3-460a-8d76-706adce157e4 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.631541] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45c4f846-d1c4-44bc-9d36-999c2211dc25 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.664493] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9503c365-e740-43a3-91fa-5adaaa1b3a5b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.674212] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68e94895-6112-4a42-b674-0feac3df444f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.688826] env[62096]: DEBUG nova.compute.provider_tree [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 905.689387] env[62096]: INFO oslo_messaging._drivers.amqpdriver 
[None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 5612cc1790f04254b77d8a732b32b6b8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 905.696902] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5612cc1790f04254b77d8a732b32b6b8 [ 905.727524] env[62096]: DEBUG nova.network.neutron [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 905.843336] env[62096]: DEBUG nova.network.neutron [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] Successfully created port: d3b0c730-8282-4ddc-8872-0178f3ddc2e6 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 905.875528] env[62096]: DEBUG nova.network.neutron [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.876053] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Expecting reply to msg aecb71d65d494b9a8657964731e025d8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 905.884423] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aecb71d65d494b9a8657964731e025d8 [ 905.927525] env[62096]: DEBUG nova.compute.manager [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 905.929344] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg b514501bbe1b4166b103d90d97f07c23 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 905.968619] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b514501bbe1b4166b103d90d97f07c23 [ 906.155252] env[62096]: DEBUG nova.compute.manager [req-1d510130-f9b8-445f-9d04-a0760651cd38 req-4c290881-272e-4dfe-893c-5bac116b930c service nova] [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] Received event network-vif-deleted-843c145b-efae-4f01-a279-a36832055d07 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 906.192368] env[62096]: DEBUG nova.scheduler.client.report [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 906.194772] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg db159fe7c9244a209d3d9a024b3f76df in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 906.208200] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg db159fe7c9244a209d3d9a024b3f76df [ 906.378694] env[62096]: DEBUG oslo_concurrency.lockutils [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Releasing lock "refresh_cache-e480be29-d25d-4ff2-8de8-26d6c4078ca9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 906.379127] env[62096]: DEBUG nova.compute.manager [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 906.379330] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 906.379641] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-30d6cca0-4666-40a4-88dd-7e30c5f6dfbb {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.389103] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ed3c326-f143-48ae-956c-b754dd2342a8 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.414566] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e480be29-d25d-4ff2-8de8-26d6c4078ca9 could not be found. [ 906.414783] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 906.414981] env[62096]: INFO nova.compute.manager [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] Took 0.04 seconds to destroy the instance on the hypervisor. [ 906.415235] env[62096]: DEBUG oslo.service.loopingcall [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 906.415452] env[62096]: DEBUG nova.compute.manager [-] [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 906.415540] env[62096]: DEBUG nova.network.neutron [-] [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 906.429610] env[62096]: DEBUG nova.network.neutron [-] [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 906.430284] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 9466e206045749c4a79020a5cddd5c74 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 906.434904] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg 0010fc7d1b844ff48636d8779dd64d6d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 906.440374] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9466e206045749c4a79020a5cddd5c74 [ 906.478922] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0010fc7d1b844ff48636d8779dd64d6d [ 906.698452] env[62096]: DEBUG oslo_concurrency.lockutils [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.781s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 906.699099] env[62096]: ERROR nova.compute.manager [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 92e9c134-3671-42c7-b549-6f5262e08e26, please check neutron logs for more information. [ 906.699099] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] Traceback (most recent call last): [ 906.699099] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 906.699099] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] self.driver.spawn(context, instance, image_meta, [ 906.699099] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 906.699099] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 906.699099] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 906.699099] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] vm_ref = self.build_virtual_machine(instance, [ 906.699099] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 906.699099] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] vif_infos = vmwarevif.get_vif_info(self._session, [ 906.699099] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 906.699429] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] for vif in network_info: [ 906.699429] env[62096]: ERROR nova.compute.manager 
[instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 906.699429] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] return self._sync_wrapper(fn, *args, **kwargs) [ 906.699429] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 906.699429] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] self.wait() [ 906.699429] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 906.699429] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] self[:] = self._gt.wait() [ 906.699429] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 906.699429] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] return self._exit_event.wait() [ 906.699429] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 906.699429] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] current.throw(*self._exc) [ 906.699429] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 906.699429] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] result = function(*args, **kwargs) [ 906.699775] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 906.699775] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] return func(*args, **kwargs) [ 906.699775] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 906.699775] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] raise e [ 906.699775] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 906.699775] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] nwinfo = self.network_api.allocate_for_instance( [ 906.699775] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 906.699775] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] created_port_ids = self._update_ports_for_instance( [ 906.699775] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 906.699775] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] with excutils.save_and_reraise_exception(): [ 906.699775] env[62096]: ERROR nova.compute.manager [instance: 
9aae3fde-f55a-4e90-a9b5-4594051183f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 906.699775] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] self.force_reraise() [ 906.699775] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 906.700139] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] raise self.value [ 906.700139] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 906.700139] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] updated_port = self._update_port( [ 906.700139] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 906.700139] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] _ensure_no_port_binding_failure(port) [ 906.700139] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 906.700139] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] raise exception.PortBindingFailed(port_id=port['id']) [ 906.700139] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] nova.exception.PortBindingFailed: Binding failed for port 92e9c134-3671-42c7-b549-6f5262e08e26, please check neutron logs for more information. [ 906.700139] env[62096]: ERROR nova.compute.manager [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] [ 906.700139] env[62096]: DEBUG nova.compute.utils [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] Binding failed for port 92e9c134-3671-42c7-b549-6f5262e08e26, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 906.701073] env[62096]: DEBUG oslo_concurrency.lockutils [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 22.668s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.702817] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg b0cce49ae8b74b3fa4b35b171be8bacb in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 906.704588] env[62096]: DEBUG nova.compute.manager [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] Build of instance 9aae3fde-f55a-4e90-a9b5-4594051183f4 was re-scheduled: Binding failed for port 92e9c134-3671-42c7-b549-6f5262e08e26, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 906.705080] env[62096]: DEBUG nova.compute.manager [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 906.705276] env[62096]: DEBUG oslo_concurrency.lockutils [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Acquiring lock "refresh_cache-9aae3fde-f55a-4e90-a9b5-4594051183f4" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 906.705425] env[62096]: DEBUG oslo_concurrency.lockutils [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Acquired lock "refresh_cache-9aae3fde-f55a-4e90-a9b5-4594051183f4" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.705583] env[62096]: DEBUG nova.network.neutron [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 906.705943] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 3e011eb3a20d4b7a8873d54631dbe3ff in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 906.725132] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e011eb3a20d4b7a8873d54631dbe3ff [ 906.737668] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b0cce49ae8b74b3fa4b35b171be8bacb [ 906.845808] env[62096]: ERROR nova.compute.manager [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d3b0c730-8282-4ddc-8872-0178f3ddc2e6, please check neutron logs for more information. 
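The ERROR record above shows the asynchronous network setup for instance 1c02c726-ab23-49e5-8d55-b222c712225a failing after one attempt with the same PortBindingFailed, and the tracebacks that follow pass through oslo_utils.excutils.save_and_reraise_exception (__exit__ / force_reraise) before the original exception is re-raised out of _update_ports_for_instance and the eventlet greenthread. Below is a minimal sketch of that cleanup-then-reraise idiom, assuming oslo.utils is installed; the rollback step and the sample ports are hypothetical placeholders, not Nova's actual port cleanup.

    # Minimal sketch of the save_and_reraise_exception idiom visible in the
    # tracebacks (excutils.py __exit__ -> force_reraise). The rollback below is
    # a hypothetical placeholder, not Nova's real behaviour.
    from oslo_utils import excutils

    def update_ports(ports):
        created = []
        try:
            for port in ports:
                if port.get('binding:vif_type') == 'binding_failed':
                    raise RuntimeError(f"Binding failed for port {port['id']}")
                created.append(port['id'])
        except Exception:
            # Capture the in-flight exception, run cleanup, then re-raise the
            # original exception with its traceback intact when the block exits.
            with excutils.save_and_reraise_exception():
                for port_id in reversed(created):
                    print(f"rolling back port {port_id}")  # placeholder cleanup
        return created

    # Synthetic sample ports for illustration only.
    try:
        update_ports([{'id': 'ok-port', 'binding:vif_type': 'ovs'},
                      {'id': 'bad-port', 'binding:vif_type': 'binding_failed'}])
    except RuntimeError as exc:
        print(exc)  # the first failure survives the cleanup unchanged

This is why force_reraise appears in both stacks in the log: the helper lets the caller undo partially created ports while still propagating the first failure to _build_and_run_instance, which then aborts the claim and terminates the instance.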
[ 906.845808] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 906.845808] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 906.845808] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 906.845808] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 906.845808] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 906.845808] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 906.845808] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 906.845808] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 906.845808] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 906.845808] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 906.845808] env[62096]: ERROR nova.compute.manager raise self.value [ 906.845808] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 906.845808] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 906.845808] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 906.845808] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 906.846259] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 906.846259] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 906.846259] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port d3b0c730-8282-4ddc-8872-0178f3ddc2e6, please check neutron logs for more information. 
[ 906.846259] env[62096]: ERROR nova.compute.manager [ 906.846259] env[62096]: Traceback (most recent call last): [ 906.846259] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 906.846259] env[62096]: listener.cb(fileno) [ 906.846259] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 906.846259] env[62096]: result = function(*args, **kwargs) [ 906.846259] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 906.846259] env[62096]: return func(*args, **kwargs) [ 906.846259] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 906.846259] env[62096]: raise e [ 906.846259] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 906.846259] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 906.846259] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 906.846259] env[62096]: created_port_ids = self._update_ports_for_instance( [ 906.846259] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 906.846259] env[62096]: with excutils.save_and_reraise_exception(): [ 906.846259] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 906.846259] env[62096]: self.force_reraise() [ 906.846259] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 906.846259] env[62096]: raise self.value [ 906.846259] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 906.846259] env[62096]: updated_port = self._update_port( [ 906.846259] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 906.846259] env[62096]: _ensure_no_port_binding_failure(port) [ 906.846259] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 906.846259] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 906.846955] env[62096]: nova.exception.PortBindingFailed: Binding failed for port d3b0c730-8282-4ddc-8872-0178f3ddc2e6, please check neutron logs for more information. [ 906.846955] env[62096]: Removing descriptor: 14 [ 906.932082] env[62096]: DEBUG nova.network.neutron [-] [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 906.932565] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg ae5efc4e8bd2463ea9a643d243b9b0fb in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 906.937955] env[62096]: DEBUG nova.compute.manager [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 906.941875] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ae5efc4e8bd2463ea9a643d243b9b0fb [ 906.973018] env[62096]: DEBUG nova.virt.hardware [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 906.973018] env[62096]: DEBUG nova.virt.hardware [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 906.973468] env[62096]: DEBUG nova.virt.hardware [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 906.973468] env[62096]: DEBUG nova.virt.hardware [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 906.973468] env[62096]: DEBUG nova.virt.hardware [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 906.973559] env[62096]: DEBUG nova.virt.hardware [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 906.973740] env[62096]: DEBUG nova.virt.hardware [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 906.974004] env[62096]: DEBUG nova.virt.hardware [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Build topologies for 1 vcpu(s) 
1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 906.974301] env[62096]: DEBUG nova.virt.hardware [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 906.974411] env[62096]: DEBUG nova.virt.hardware [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 906.974500] env[62096]: DEBUG nova.virt.hardware [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 906.975443] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9c04b0b-25ec-4ff7-8e53-c8b23239df95 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.984665] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2f4bb98-c862-49c5-87a5-e350ec0431cf {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.004565] env[62096]: ERROR nova.compute.manager [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port d3b0c730-8282-4ddc-8872-0178f3ddc2e6, please check neutron logs for more information. 
[ 907.004565] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] Traceback (most recent call last): [ 907.004565] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 907.004565] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] yield resources [ 907.004565] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 907.004565] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] self.driver.spawn(context, instance, image_meta, [ 907.004565] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 907.004565] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 907.004565] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 907.004565] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] vm_ref = self.build_virtual_machine(instance, [ 907.004565] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 907.004925] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] vif_infos = vmwarevif.get_vif_info(self._session, [ 907.004925] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 907.004925] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] for vif in network_info: [ 907.004925] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 907.004925] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] return self._sync_wrapper(fn, *args, **kwargs) [ 907.004925] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 907.004925] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] self.wait() [ 907.004925] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 907.004925] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] self[:] = self._gt.wait() [ 907.004925] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 907.004925] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] return self._exit_event.wait() [ 907.004925] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 907.004925] env[62096]: ERROR 
nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] current.throw(*self._exc) [ 907.005268] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 907.005268] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] result = function(*args, **kwargs) [ 907.005268] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 907.005268] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] return func(*args, **kwargs) [ 907.005268] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 907.005268] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] raise e [ 907.005268] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 907.005268] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] nwinfo = self.network_api.allocate_for_instance( [ 907.005268] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 907.005268] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] created_port_ids = self._update_ports_for_instance( [ 907.005268] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 907.005268] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] with excutils.save_and_reraise_exception(): [ 907.005268] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 907.005585] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] self.force_reraise() [ 907.005585] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 907.005585] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] raise self.value [ 907.005585] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 907.005585] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] updated_port = self._update_port( [ 907.005585] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 907.005585] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] _ensure_no_port_binding_failure(port) [ 907.005585] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
907.005585] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] raise exception.PortBindingFailed(port_id=port['id']) [ 907.005585] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] nova.exception.PortBindingFailed: Binding failed for port d3b0c730-8282-4ddc-8872-0178f3ddc2e6, please check neutron logs for more information. [ 907.005585] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] [ 907.006283] env[62096]: INFO nova.compute.manager [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] Terminating instance [ 907.009043] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Acquiring lock "refresh_cache-1c02c726-ab23-49e5-8d55-b222c712225a" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 907.009392] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Acquired lock "refresh_cache-1c02c726-ab23-49e5-8d55-b222c712225a" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.009682] env[62096]: DEBUG nova.network.neutron [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 907.010790] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg 2986ac736b114fd88819f7443b92b980 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 907.018493] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2986ac736b114fd88819f7443b92b980 [ 907.232938] env[62096]: DEBUG nova.network.neutron [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 907.319907] env[62096]: DEBUG nova.network.neutron [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.320463] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 6ba5ba7926144c8db730f430966f9fea in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 907.328994] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6ba5ba7926144c8db730f430966f9fea [ 907.400889] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8e18c0a-385b-4e52-b125-ffd49df2d61d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.408676] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ca4a69a-28c3-416f-a928-0e66964772e4 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.439858] env[62096]: INFO nova.compute.manager [-] [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] Took 1.02 seconds to deallocate network for instance. [ 907.442581] env[62096]: DEBUG nova.compute.claims [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 907.442769] env[62096]: DEBUG oslo_concurrency.lockutils [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 907.443544] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44c11150-e0e2-49eb-9613-b5da2ded4e64 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.452840] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bca7bf4d-2b76-4208-8887-3422e2cfbd2a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.465467] env[62096]: DEBUG nova.compute.provider_tree [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 907.466343] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 86d3edc911d345e3bed89f4423450c26 in queue 
reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 907.476720] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 86d3edc911d345e3bed89f4423450c26 [ 907.528878] env[62096]: DEBUG nova.network.neutron [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 907.549369] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Acquiring lock "dc451ce2-65de-4497-ad48-fd776f73cb80" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 907.549369] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Lock "dc451ce2-65de-4497-ad48-fd776f73cb80" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 907.597689] env[62096]: DEBUG nova.network.neutron [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.598209] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg 15daa6fed73a49aeb1d61a1f46a3d575 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 907.607470] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 15daa6fed73a49aeb1d61a1f46a3d575 [ 907.717849] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Acquiring lock "9f3ce570-878f-48bf-a08c-0387b0556785" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 907.718092] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Lock "9f3ce570-878f-48bf-a08c-0387b0556785" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 907.823089] env[62096]: DEBUG oslo_concurrency.lockutils [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Releasing lock "refresh_cache-9aae3fde-f55a-4e90-a9b5-4594051183f4" {{(pid=62096) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 907.823400] env[62096]: DEBUG nova.compute.manager [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 907.823588] env[62096]: DEBUG nova.compute.manager [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 907.823753] env[62096]: DEBUG nova.network.neutron [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 907.837485] env[62096]: DEBUG nova.network.neutron [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 907.837934] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 4c4097bf0e2f480ab5fe3dd630a72017 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 907.845519] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4c4097bf0e2f480ab5fe3dd630a72017 [ 907.968546] env[62096]: DEBUG nova.scheduler.client.report [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 907.970843] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 9a03c6b73582484c9635425e74d91895 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 907.986038] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9a03c6b73582484c9635425e74d91895 [ 908.101780] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Releasing lock "refresh_cache-1c02c726-ab23-49e5-8d55-b222c712225a" {{(pid=62096) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 908.101780] env[62096]: DEBUG nova.compute.manager [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 908.101780] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 908.101780] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8c9239ea-e8a1-41a2-8a24-31d0d1c8726d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.110913] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35b3cb83-a6cb-4752-a8b9-403a7dba15e2 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.131715] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1c02c726-ab23-49e5-8d55-b222c712225a could not be found. [ 908.131931] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 908.132127] env[62096]: INFO nova.compute.manager [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] Took 0.03 seconds to destroy the instance on the hypervisor. [ 908.132364] env[62096]: DEBUG oslo.service.loopingcall [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 908.132589] env[62096]: DEBUG nova.compute.manager [-] [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 908.132681] env[62096]: DEBUG nova.network.neutron [-] [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 908.146309] env[62096]: DEBUG nova.network.neutron [-] [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 908.146773] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 7c16acf13db54a12b6c6c5665f0d2b9e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 908.153058] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c16acf13db54a12b6c6c5665f0d2b9e [ 908.180600] env[62096]: DEBUG nova.compute.manager [req-6717c78a-42c8-432b-a5a5-ddfeb3dea163 req-b186fae0-717c-4d79-87ed-58afe211ddea service nova] [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] Received event network-changed-d3b0c730-8282-4ddc-8872-0178f3ddc2e6 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 908.180600] env[62096]: DEBUG nova.compute.manager [req-6717c78a-42c8-432b-a5a5-ddfeb3dea163 req-b186fae0-717c-4d79-87ed-58afe211ddea service nova] [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] Refreshing instance network info cache due to event network-changed-d3b0c730-8282-4ddc-8872-0178f3ddc2e6. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 908.180991] env[62096]: DEBUG oslo_concurrency.lockutils [req-6717c78a-42c8-432b-a5a5-ddfeb3dea163 req-b186fae0-717c-4d79-87ed-58afe211ddea service nova] Acquiring lock "refresh_cache-1c02c726-ab23-49e5-8d55-b222c712225a" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 908.180991] env[62096]: DEBUG oslo_concurrency.lockutils [req-6717c78a-42c8-432b-a5a5-ddfeb3dea163 req-b186fae0-717c-4d79-87ed-58afe211ddea service nova] Acquired lock "refresh_cache-1c02c726-ab23-49e5-8d55-b222c712225a" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 908.181066] env[62096]: DEBUG nova.network.neutron [req-6717c78a-42c8-432b-a5a5-ddfeb3dea163 req-b186fae0-717c-4d79-87ed-58afe211ddea service nova] [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] Refreshing network info cache for port d3b0c730-8282-4ddc-8872-0178f3ddc2e6 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 908.181464] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-6717c78a-42c8-432b-a5a5-ddfeb3dea163 req-b186fae0-717c-4d79-87ed-58afe211ddea service nova] Expecting reply to msg d48ea022a5b44335ba17da3cfe43aaa2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 908.187885] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d48ea022a5b44335ba17da3cfe43aaa2 [ 908.340413] env[62096]: DEBUG nova.network.neutron [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.340733] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 3552a1af5edf4ffa9ea8f090a43ab21b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 908.348260] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3552a1af5edf4ffa9ea8f090a43ab21b [ 908.473345] env[62096]: DEBUG oslo_concurrency.lockutils [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 
tempest-ImagesTestJSON-417897785-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.772s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 908.473941] env[62096]: ERROR nova.compute.manager [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 60bc5e14-f495-48da-8db0-54d75b523822] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7e36f48d-97d8-4355-924b-977fd775b8b0, please check neutron logs for more information. [ 908.473941] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] Traceback (most recent call last): [ 908.473941] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 908.473941] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] self.driver.spawn(context, instance, image_meta, [ 908.473941] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 908.473941] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] self._vmops.spawn(context, instance, image_meta, injected_files, [ 908.473941] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 908.473941] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] vm_ref = self.build_virtual_machine(instance, [ 908.473941] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 908.473941] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] vif_infos = vmwarevif.get_vif_info(self._session, [ 908.473941] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 908.474418] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] for vif in network_info: [ 908.474418] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 908.474418] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] return self._sync_wrapper(fn, *args, **kwargs) [ 908.474418] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 908.474418] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] self.wait() [ 908.474418] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 908.474418] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] self[:] = self._gt.wait() [ 908.474418] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 908.474418] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] return self._exit_event.wait() [ 908.474418] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 908.474418] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] current.throw(*self._exc) [ 908.474418] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 908.474418] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] result = function(*args, **kwargs) [ 908.474818] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 908.474818] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] return func(*args, **kwargs) [ 908.474818] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 908.474818] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] raise e [ 908.474818] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 908.474818] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] nwinfo = self.network_api.allocate_for_instance( [ 908.474818] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 908.474818] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] created_port_ids = self._update_ports_for_instance( [ 908.474818] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 908.474818] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] with excutils.save_and_reraise_exception(): [ 908.474818] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 908.474818] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] self.force_reraise() [ 908.474818] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 908.475252] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] raise self.value [ 908.475252] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 908.475252] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] updated_port = self._update_port( [ 908.475252] env[62096]: ERROR nova.compute.manager [instance: 
60bc5e14-f495-48da-8db0-54d75b523822] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 908.475252] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] _ensure_no_port_binding_failure(port) [ 908.475252] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 908.475252] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] raise exception.PortBindingFailed(port_id=port['id']) [ 908.475252] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] nova.exception.PortBindingFailed: Binding failed for port 7e36f48d-97d8-4355-924b-977fd775b8b0, please check neutron logs for more information. [ 908.475252] env[62096]: ERROR nova.compute.manager [instance: 60bc5e14-f495-48da-8db0-54d75b523822] [ 908.475252] env[62096]: DEBUG nova.compute.utils [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 60bc5e14-f495-48da-8db0-54d75b523822] Binding failed for port 7e36f48d-97d8-4355-924b-977fd775b8b0, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 908.476440] env[62096]: DEBUG nova.compute.manager [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 60bc5e14-f495-48da-8db0-54d75b523822] Build of instance 60bc5e14-f495-48da-8db0-54d75b523822 was re-scheduled: Binding failed for port 7e36f48d-97d8-4355-924b-977fd775b8b0, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 908.476849] env[62096]: DEBUG nova.compute.manager [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 60bc5e14-f495-48da-8db0-54d75b523822] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 908.477074] env[62096]: DEBUG oslo_concurrency.lockutils [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Acquiring lock "refresh_cache-60bc5e14-f495-48da-8db0-54d75b523822" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 908.477220] env[62096]: DEBUG oslo_concurrency.lockutils [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Acquired lock "refresh_cache-60bc5e14-f495-48da-8db0-54d75b523822" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 908.477374] env[62096]: DEBUG nova.network.neutron [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 60bc5e14-f495-48da-8db0-54d75b523822] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 908.477764] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg c886ade43a244761ac8a7a75fc3ea759 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 908.478926] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 23.390s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 908.479102] env[62096]: DEBUG nova.objects.instance [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62096) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 908.480549] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Expecting reply to msg cf7121b3d4894cca8cb9b110aa1757ef in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 908.485813] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c886ade43a244761ac8a7a75fc3ea759 [ 908.511786] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cf7121b3d4894cca8cb9b110aa1757ef [ 908.649104] env[62096]: DEBUG nova.network.neutron [-] [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.649594] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] 
Expecting reply to msg 3411784a66e84fd983d3886167377f05 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 908.657702] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3411784a66e84fd983d3886167377f05 [ 908.700499] env[62096]: DEBUG nova.network.neutron [req-6717c78a-42c8-432b-a5a5-ddfeb3dea163 req-b186fae0-717c-4d79-87ed-58afe211ddea service nova] [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 908.768748] env[62096]: DEBUG nova.network.neutron [req-6717c78a-42c8-432b-a5a5-ddfeb3dea163 req-b186fae0-717c-4d79-87ed-58afe211ddea service nova] [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.769300] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-6717c78a-42c8-432b-a5a5-ddfeb3dea163 req-b186fae0-717c-4d79-87ed-58afe211ddea service nova] Expecting reply to msg 7e3211d8b7574978a25afad69f477d9b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 908.777684] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7e3211d8b7574978a25afad69f477d9b [ 908.843056] env[62096]: INFO nova.compute.manager [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 9aae3fde-f55a-4e90-a9b5-4594051183f4] Took 1.02 seconds to deallocate network for instance. [ 908.845010] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 499b73f3986745d48bfd831f37cd48fa in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 908.877656] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 499b73f3986745d48bfd831f37cd48fa [ 908.988146] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Expecting reply to msg 04a774bc4fae44a4b75942d9f629f8cf in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 908.993378] env[62096]: DEBUG nova.network.neutron [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 60bc5e14-f495-48da-8db0-54d75b523822] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 908.995645] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 04a774bc4fae44a4b75942d9f629f8cf [ 909.060994] env[62096]: DEBUG nova.network.neutron [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 60bc5e14-f495-48da-8db0-54d75b523822] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.061522] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg ec7ec971b33447c181164a17b9413de8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 909.070222] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ec7ec971b33447c181164a17b9413de8 [ 909.151386] env[62096]: INFO nova.compute.manager [-] [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] Took 1.02 seconds to deallocate network for instance. [ 909.154647] env[62096]: DEBUG nova.compute.claims [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 909.155018] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 909.272381] env[62096]: DEBUG oslo_concurrency.lockutils [req-6717c78a-42c8-432b-a5a5-ddfeb3dea163 req-b186fae0-717c-4d79-87ed-58afe211ddea service nova] Releasing lock "refresh_cache-1c02c726-ab23-49e5-8d55-b222c712225a" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 909.272636] env[62096]: DEBUG nova.compute.manager [req-6717c78a-42c8-432b-a5a5-ddfeb3dea163 req-b186fae0-717c-4d79-87ed-58afe211ddea service nova] [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] Received event network-vif-deleted-d3b0c730-8282-4ddc-8872-0178f3ddc2e6 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 909.349164] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 5e63b3384fea45a1b5baffb7d9bc3c3e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 909.381251] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5e63b3384fea45a1b5baffb7d9bc3c3e [ 909.491674] env[62096]: DEBUG oslo_concurrency.lockutils [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.492132] env[62096]: INFO 
oslo_messaging._drivers.amqpdriver [None req-a49958a4-6510-4969-b280-c49ebf520e10 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Expecting reply to msg 090c0a38f53b4aa1b8be3b4cdb4ca4eb in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 909.493538] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 20.824s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.495548] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Expecting reply to msg c51c7a92f7af46d899b92b8045e07688 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 909.503254] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 090c0a38f53b4aa1b8be3b4cdb4ca4eb [ 909.528286] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c51c7a92f7af46d899b92b8045e07688 [ 909.563498] env[62096]: DEBUG oslo_concurrency.lockutils [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Releasing lock "refresh_cache-60bc5e14-f495-48da-8db0-54d75b523822" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 909.563834] env[62096]: DEBUG nova.compute.manager [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 909.563909] env[62096]: DEBUG nova.compute.manager [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 60bc5e14-f495-48da-8db0-54d75b523822] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 909.564142] env[62096]: DEBUG nova.network.neutron [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 60bc5e14-f495-48da-8db0-54d75b523822] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 909.578611] env[62096]: DEBUG nova.network.neutron [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 60bc5e14-f495-48da-8db0-54d75b523822] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 909.579133] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 3755e8509ce640b7b6dbf7eb3ccc8c34 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 909.585192] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3755e8509ce640b7b6dbf7eb3ccc8c34 [ 909.874057] env[62096]: INFO nova.scheduler.client.report [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Deleted allocations for instance 9aae3fde-f55a-4e90-a9b5-4594051183f4 [ 909.879984] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg cda5f6609ed34c2c8472942d27f51246 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 909.890091] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cda5f6609ed34c2c8472942d27f51246 [ 910.081292] env[62096]: DEBUG nova.network.neutron [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 60bc5e14-f495-48da-8db0-54d75b523822] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.081774] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg aebc6b88ffb94597b45d2a5821a3f3ab in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 910.089922] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aebc6b88ffb94597b45d2a5821a3f3ab [ 910.132895] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0428f40-874b-40ce-82ae-9cc0a0cbdf0b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.140414] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-671cbe13-c0a1-4534-88df-1f18d3a9363a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.170544] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18519fd5-1fcf-4c2b-8e5b-f0d3c75108f3 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.177660] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7da5f6f4-4a6d-4dca-ba6c-c8c94b811fd3 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.190516] env[62096]: DEBUG nova.compute.provider_tree [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 910.191002] 
env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Expecting reply to msg d25cdbe2304948f5b2a715669521586a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 910.198291] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d25cdbe2304948f5b2a715669521586a [ 910.382125] env[62096]: DEBUG oslo_concurrency.lockutils [None req-0f19979c-d609-46af-b70a-3aa130c0212c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Lock "9aae3fde-f55a-4e90-a9b5-4594051183f4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 102.247s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.382779] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg 5f940db05c2e4b30bad48846a0ec920c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 910.393497] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5f940db05c2e4b30bad48846a0ec920c [ 910.584175] env[62096]: INFO nova.compute.manager [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: 60bc5e14-f495-48da-8db0-54d75b523822] Took 1.02 seconds to deallocate network for instance. [ 910.585915] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 7ba10114e8014672878ee9d0053d6ae1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 910.616996] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ba10114e8014672878ee9d0053d6ae1 [ 910.693701] env[62096]: DEBUG nova.scheduler.client.report [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 910.696144] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Expecting reply to msg 7f776834666f40f28e5314227cd7d38a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 910.706870] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7f776834666f40f28e5314227cd7d38a [ 910.885104] env[62096]: DEBUG nova.compute.manager [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: dc451ce2-65de-4497-ad48-fd776f73cb80] Starting 
instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 910.886850] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg 3d9190409aee47efa4fbadedd1211706 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 910.923895] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3d9190409aee47efa4fbadedd1211706 [ 911.090629] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg f487d18f769a4625a4e8478238042be7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 911.119932] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f487d18f769a4625a4e8478238042be7 [ 911.198730] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.705s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.199230] env[62096]: ERROR nova.compute.manager [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port fb223d70-ad13-4c1b-8030-cc860bc8f729, please check neutron logs for more information. 
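Each of the PortBindingFailed failures in this log segment follows the same pattern: nova-compute asks Neutron to bind a port for the instance (ports d3b0c730-8282-4ddc-8872-0178f3ddc2e6, 7e36f48d-97d8-4355-924b-977fd775b8b0 and fb223d70-ad13-4c1b-8030-cc860bc8f729), Neutron reports the binding as failed, and the check in nova/network/neutron.py shown in the tracebacks, _ensure_no_port_binding_failure(), raises nova.exception.PortBindingFailed. Nova then aborts the resource claim, deallocates the instance's networking and re-schedules the build, while the actual cause is only visible on the Neutron side, as the error message itself says. As a rough follow-up aid — an illustrative sketch only, not part of this log and not the Nova source — the ports named above could be inspected with openstacksdk; the cloud name "devstack-admin" is an assumed clouds.yaml entry, and the ports may no longer exist by the time this is run:

    import openstack

    # Port IDs taken from the PortBindingFailed messages in this log.
    FAILED_PORTS = (
        "d3b0c730-8282-4ddc-8872-0178f3ddc2e6",
        "7e36f48d-97d8-4355-924b-977fd775b8b0",
        "fb223d70-ad13-4c1b-8030-cc860bc8f729",
    )

    # Assumed clouds.yaml entry name. Note that Nova may already have deleted
    # these ports (the log above records a network-vif-deleted event for the
    # first one), in which case get_port() fails with a not-found error.
    conn = openstack.connect(cloud="devstack-admin")

    for port_id in FAILED_PORTS:
        port = conn.network.get_port(port_id)
        # A vif_type of "binding_failed" is what Neutron reports when no
        # mechanism driver could bind the port on the requested host.
        print(port_id, port.binding_vif_type, port.binding_host_id)

If a port is already gone, or shows a failed binding, the next stop is the neutron-server log for the corresponding port create/update, which is where the reason the binding could not be satisfied on this host is recorded.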
[ 911.199230] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] Traceback (most recent call last): [ 911.199230] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 911.199230] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] self.driver.spawn(context, instance, image_meta, [ 911.199230] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 911.199230] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] self._vmops.spawn(context, instance, image_meta, injected_files, [ 911.199230] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 911.199230] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] vm_ref = self.build_virtual_machine(instance, [ 911.199230] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 911.199230] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] vif_infos = vmwarevif.get_vif_info(self._session, [ 911.199230] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 911.199537] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] for vif in network_info: [ 911.199537] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 911.199537] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] return self._sync_wrapper(fn, *args, **kwargs) [ 911.199537] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 911.199537] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] self.wait() [ 911.199537] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 911.199537] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] self[:] = self._gt.wait() [ 911.199537] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 911.199537] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] return self._exit_event.wait() [ 911.199537] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 911.199537] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] current.throw(*self._exc) [ 911.199537] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
911.199537] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] result = function(*args, **kwargs) [ 911.199873] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 911.199873] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] return func(*args, **kwargs) [ 911.199873] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 911.199873] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] raise e [ 911.199873] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 911.199873] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] nwinfo = self.network_api.allocate_for_instance( [ 911.199873] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 911.199873] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] created_port_ids = self._update_ports_for_instance( [ 911.199873] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 911.199873] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] with excutils.save_and_reraise_exception(): [ 911.199873] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 911.199873] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] self.force_reraise() [ 911.199873] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 911.200226] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] raise self.value [ 911.200226] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 911.200226] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] updated_port = self._update_port( [ 911.200226] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 911.200226] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] _ensure_no_port_binding_failure(port) [ 911.200226] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 911.200226] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] raise exception.PortBindingFailed(port_id=port['id']) [ 911.200226] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] nova.exception.PortBindingFailed: Binding failed for 
port fb223d70-ad13-4c1b-8030-cc860bc8f729, please check neutron logs for more information. [ 911.200226] env[62096]: ERROR nova.compute.manager [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] [ 911.200226] env[62096]: DEBUG nova.compute.utils [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] Binding failed for port fb223d70-ad13-4c1b-8030-cc860bc8f729, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 911.201128] env[62096]: DEBUG oslo_concurrency.lockutils [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.840s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.202957] env[62096]: INFO nova.compute.claims [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 8802af61-3692-4e27-85f4-945afccff231] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 911.204781] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg f3fafaec382048ef87083e7cbf6c237b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 911.205993] env[62096]: DEBUG nova.compute.manager [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] Build of instance f832a621-c221-4ae8-928e-d6f9fa6b0586 was re-scheduled: Binding failed for port fb223d70-ad13-4c1b-8030-cc860bc8f729, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 911.206419] env[62096]: DEBUG nova.compute.manager [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 911.206648] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Acquiring lock "refresh_cache-f832a621-c221-4ae8-928e-d6f9fa6b0586" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 911.206792] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Acquired lock "refresh_cache-f832a621-c221-4ae8-928e-d6f9fa6b0586" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.206947] env[62096]: DEBUG nova.network.neutron [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 911.207296] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Expecting reply to msg 8ff4986a435a420e89ae9f72f50ed7c4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 911.213041] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8ff4986a435a420e89ae9f72f50ed7c4 [ 911.259644] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f3fafaec382048ef87083e7cbf6c237b [ 911.396820] env[62096]: DEBUG oslo_concurrency.lockutils [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Acquiring lock "6b8a00ab-b6be-46ad-989d-81692d1d7556" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.397127] env[62096]: DEBUG oslo_concurrency.lockutils [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Lock "6b8a00ab-b6be-46ad-989d-81692d1d7556" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.409130] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
911.611946] env[62096]: INFO nova.scheduler.client.report [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Deleted allocations for instance 60bc5e14-f495-48da-8db0-54d75b523822 [ 911.617842] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg a8ecb6be3b9b46a68342e50a307a1f83 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 911.629273] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a8ecb6be3b9b46a68342e50a307a1f83 [ 911.710193] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg b931c581f82e4b9dae120afa084b8024 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 911.716771] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b931c581f82e4b9dae120afa084b8024 [ 911.725648] env[62096]: DEBUG nova.network.neutron [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 911.804780] env[62096]: DEBUG nova.network.neutron [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 911.805366] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Expecting reply to msg de2227c93eef4b66baee45099321e320 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 911.813150] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg de2227c93eef4b66baee45099321e320 [ 912.120133] env[62096]: DEBUG oslo_concurrency.lockutils [None req-37030ed6-9fb4-44d5-b2b3-bbe259fd74a3 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Lock "60bc5e14-f495-48da-8db0-54d75b523822" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 95.605s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 912.120734] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg b2c5182c9323436fa2eaae2a4ff4e357 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 912.129750] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b2c5182c9323436fa2eaae2a4ff4e357 [ 912.307091] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Releasing lock 
"refresh_cache-f832a621-c221-4ae8-928e-d6f9fa6b0586" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 912.307247] env[62096]: DEBUG nova.compute.manager [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 912.307432] env[62096]: DEBUG nova.compute.manager [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 912.307628] env[62096]: DEBUG nova.network.neutron [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 912.323432] env[62096]: DEBUG nova.network.neutron [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 912.324042] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Expecting reply to msg 760559b2b03f476e97866edc8b6e7d15 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 912.330208] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 760559b2b03f476e97866edc8b6e7d15 [ 912.363899] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a139339a-247b-4ee7-9e2c-e492b887e45f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.371616] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-204f69e7-3e42-4563-a4b6-dd61845ce023 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.400931] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab247c03-8bd1-4571-b017-597460cc9e11 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.407766] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d4ef513-5961-4cad-a521-ad016371ab23 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.420026] env[62096]: DEBUG nova.compute.provider_tree [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 912.420627] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg ec410e9fe4a848039d9ae2e1c8ace806 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 912.427459] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ec410e9fe4a848039d9ae2e1c8ace806 [ 912.623004] env[62096]: DEBUG nova.compute.manager [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 912.624855] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg 46266d9db61a4914b55e6b4976663635 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 912.656147] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 46266d9db61a4914b55e6b4976663635 [ 912.826009] env[62096]: DEBUG nova.network.neutron [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.826904] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Expecting reply to msg 822c35735df046f9a0e5c5f71a04acf3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 912.835633] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 822c35735df046f9a0e5c5f71a04acf3 [ 912.923957] env[62096]: DEBUG nova.scheduler.client.report [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 912.926587] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg f32a0313539644b9ba48632e551f1913 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 912.949483] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f32a0313539644b9ba48632e551f1913 [ 913.149834] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 
tempest-ServerShowV247Test-791895041-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 913.293418] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Acquiring lock "a960bdaa-ed46-4f5e-ad1a-56b06589c362" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 913.293675] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Lock "a960bdaa-ed46-4f5e-ad1a-56b06589c362" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 913.329029] env[62096]: INFO nova.compute.manager [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] [instance: f832a621-c221-4ae8-928e-d6f9fa6b0586] Took 1.02 seconds to deallocate network for instance. [ 913.330878] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Expecting reply to msg 8170c728cfc54001a17b1f356e66969b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 913.363746] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8170c728cfc54001a17b1f356e66969b [ 913.450135] env[62096]: DEBUG oslo_concurrency.lockutils [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.249s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 913.450662] env[62096]: DEBUG nova.compute.manager [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 8802af61-3692-4e27-85f4-945afccff231] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 913.452465] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg b52e3cb9ecdc48fb9d92d1fa0c62c84a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 913.453771] env[62096]: DEBUG oslo_concurrency.lockutils [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 20.442s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 913.455655] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Expecting reply to msg e433e09e3be94cb0bcb79ef2e66b3a0f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 913.482599] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b52e3cb9ecdc48fb9d92d1fa0c62c84a [ 913.484786] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e433e09e3be94cb0bcb79ef2e66b3a0f [ 913.835974] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Expecting reply to msg 0aa6168fdf8f4fad8a0a306ba9527b63 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 913.867487] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0aa6168fdf8f4fad8a0a306ba9527b63 [ 913.961372] env[62096]: DEBUG nova.compute.utils [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 913.961916] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg d1865c5a930742b0b027651fe9c6a0dc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 913.962924] env[62096]: DEBUG nova.compute.manager [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 8802af61-3692-4e27-85f4-945afccff231] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 913.963090] env[62096]: DEBUG nova.network.neutron [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 8802af61-3692-4e27-85f4-945afccff231] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 913.971612] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d1865c5a930742b0b027651fe9c6a0dc [ 914.038563] env[62096]: DEBUG nova.policy [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6f956b78c87f49c29ac1d804316f1896', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '205e7b01639c499baadc35fb26fba6ff', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 914.169064] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5852b394-94f3-45dc-9515-b404bdda548f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.176913] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84cb282e-cc47-4f40-9624-c570b04cdfd5 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.205918] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cb9067e-3f73-4a74-887d-8ce88b8518fd {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.216126] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aed3f30c-83b8-4bda-a97e-6e61d78bccb2 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.227090] env[62096]: DEBUG nova.compute.provider_tree [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 914.227606] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Expecting reply to msg 59d79e839d9c4c42a7f7f18cde675eb2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 914.234928] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 59d79e839d9c4c42a7f7f18cde675eb2 [ 914.356949] env[62096]: DEBUG nova.network.neutron [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 8802af61-3692-4e27-85f4-945afccff231] Successfully created port: 3e05661d-2e87-4072-aba5-687246e50404 
{{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 914.360885] env[62096]: INFO nova.scheduler.client.report [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Deleted allocations for instance f832a621-c221-4ae8-928e-d6f9fa6b0586 [ 914.369517] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Expecting reply to msg 75a4eeca366c4ae1858eb0520b5f8355 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 914.385353] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 75a4eeca366c4ae1858eb0520b5f8355 [ 914.469049] env[62096]: DEBUG nova.compute.manager [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 8802af61-3692-4e27-85f4-945afccff231] Start building block device mappings for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 914.469049] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg 34a5469e76a447faadc7aa54c2e1c27c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 914.499755] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 34a5469e76a447faadc7aa54c2e1c27c [ 914.731626] env[62096]: DEBUG nova.scheduler.client.report [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 914.733389] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Expecting reply to msg 6b5c3c27f7f14a409ffa3e5a5a350afa in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 914.743928] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6b5c3c27f7f14a409ffa3e5a5a350afa [ 914.873181] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c70231a0-48a5-4b3b-b009-020c7f69a59b tempest-AttachVolumeShelveTestJSON-637009404 tempest-AttachVolumeShelveTestJSON-637009404-project-member] Lock "f832a621-c221-4ae8-928e-d6f9fa6b0586" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 90.643s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 914.873181] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] 
Expecting reply to msg a7f3b7db40404b0ab9cca4f325f0f08a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 914.897433] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a7f3b7db40404b0ab9cca4f325f0f08a [ 914.969954] env[62096]: DEBUG nova.compute.manager [req-c3f63af1-b333-43a9-aa1b-f70c3ebb7f77 req-26b418f0-d467-4cf7-98d0-317d29950a2a service nova] [instance: 8802af61-3692-4e27-85f4-945afccff231] Received event network-changed-3e05661d-2e87-4072-aba5-687246e50404 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 914.970397] env[62096]: DEBUG nova.compute.manager [req-c3f63af1-b333-43a9-aa1b-f70c3ebb7f77 req-26b418f0-d467-4cf7-98d0-317d29950a2a service nova] [instance: 8802af61-3692-4e27-85f4-945afccff231] Refreshing instance network info cache due to event network-changed-3e05661d-2e87-4072-aba5-687246e50404. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 914.970786] env[62096]: DEBUG oslo_concurrency.lockutils [req-c3f63af1-b333-43a9-aa1b-f70c3ebb7f77 req-26b418f0-d467-4cf7-98d0-317d29950a2a service nova] Acquiring lock "refresh_cache-8802af61-3692-4e27-85f4-945afccff231" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 914.971071] env[62096]: DEBUG oslo_concurrency.lockutils [req-c3f63af1-b333-43a9-aa1b-f70c3ebb7f77 req-26b418f0-d467-4cf7-98d0-317d29950a2a service nova] Acquired lock "refresh_cache-8802af61-3692-4e27-85f4-945afccff231" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.971792] env[62096]: DEBUG nova.network.neutron [req-c3f63af1-b333-43a9-aa1b-f70c3ebb7f77 req-26b418f0-d467-4cf7-98d0-317d29950a2a service nova] [instance: 8802af61-3692-4e27-85f4-945afccff231] Refreshing network info cache for port 3e05661d-2e87-4072-aba5-687246e50404 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 914.972480] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-c3f63af1-b333-43a9-aa1b-f70c3ebb7f77 req-26b418f0-d467-4cf7-98d0-317d29950a2a service nova] Expecting reply to msg 54bd5f60ab0542b89b57167255a3e534 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 914.976043] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg c7bff7f922bf43cdad7b83fcb9dd90b5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 914.979225] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 54bd5f60ab0542b89b57167255a3e534 [ 915.011298] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c7bff7f922bf43cdad7b83fcb9dd90b5 [ 915.149614] env[62096]: ERROR nova.compute.manager [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 3e05661d-2e87-4072-aba5-687246e50404, please check neutron logs for more information. 
[ 915.149614] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 915.149614] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 915.149614] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 915.149614] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 915.149614] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 915.149614] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 915.149614] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 915.149614] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 915.149614] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 915.149614] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 915.149614] env[62096]: ERROR nova.compute.manager raise self.value [ 915.149614] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 915.149614] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 915.149614] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 915.149614] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 915.150074] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 915.150074] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 915.150074] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 3e05661d-2e87-4072-aba5-687246e50404, please check neutron logs for more information. 
[ 915.150074] env[62096]: ERROR nova.compute.manager [ 915.150074] env[62096]: Traceback (most recent call last): [ 915.150074] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 915.150074] env[62096]: listener.cb(fileno) [ 915.150074] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 915.150074] env[62096]: result = function(*args, **kwargs) [ 915.150074] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 915.150074] env[62096]: return func(*args, **kwargs) [ 915.150074] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 915.150074] env[62096]: raise e [ 915.150074] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 915.150074] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 915.150074] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 915.150074] env[62096]: created_port_ids = self._update_ports_for_instance( [ 915.150074] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 915.150074] env[62096]: with excutils.save_and_reraise_exception(): [ 915.150074] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 915.150074] env[62096]: self.force_reraise() [ 915.150074] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 915.150074] env[62096]: raise self.value [ 915.150074] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 915.150074] env[62096]: updated_port = self._update_port( [ 915.150074] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 915.150074] env[62096]: _ensure_no_port_binding_failure(port) [ 915.150074] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 915.150074] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 915.150834] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 3e05661d-2e87-4072-aba5-687246e50404, please check neutron logs for more information. [ 915.150834] env[62096]: Removing descriptor: 14 [ 915.240500] env[62096]: DEBUG oslo_concurrency.lockutils [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.782s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 915.240500] env[62096]: ERROR nova.compute.manager [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e927db6d-5558-4472-a05c-256404ce7a0e, please check neutron logs for more information. 
[ 915.240500] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] Traceback (most recent call last): [ 915.240500] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 915.240500] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] self.driver.spawn(context, instance, image_meta, [ 915.240500] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 915.240500] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] self._vmops.spawn(context, instance, image_meta, injected_files, [ 915.240500] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 915.240500] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] vm_ref = self.build_virtual_machine(instance, [ 915.240864] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 915.240864] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] vif_infos = vmwarevif.get_vif_info(self._session, [ 915.240864] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 915.240864] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] for vif in network_info: [ 915.240864] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 915.240864] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] return self._sync_wrapper(fn, *args, **kwargs) [ 915.240864] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 915.240864] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] self.wait() [ 915.240864] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 915.240864] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] self[:] = self._gt.wait() [ 915.240864] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 915.240864] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] return self._exit_event.wait() [ 915.240864] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 915.241209] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] current.throw(*self._exc) [ 915.241209] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
915.241209] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] result = function(*args, **kwargs) [ 915.241209] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 915.241209] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] return func(*args, **kwargs) [ 915.241209] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 915.241209] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] raise e [ 915.241209] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 915.241209] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] nwinfo = self.network_api.allocate_for_instance( [ 915.241209] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 915.241209] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] created_port_ids = self._update_ports_for_instance( [ 915.241209] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 915.241209] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] with excutils.save_and_reraise_exception(): [ 915.241578] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 915.241578] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] self.force_reraise() [ 915.241578] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 915.241578] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] raise self.value [ 915.241578] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 915.241578] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] updated_port = self._update_port( [ 915.241578] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 915.241578] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] _ensure_no_port_binding_failure(port) [ 915.241578] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 915.241578] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] raise exception.PortBindingFailed(port_id=port['id']) [ 915.241578] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] nova.exception.PortBindingFailed: Binding failed for 
port e927db6d-5558-4472-a05c-256404ce7a0e, please check neutron logs for more information. [ 915.241578] env[62096]: ERROR nova.compute.manager [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] [ 915.241922] env[62096]: DEBUG nova.compute.utils [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] Binding failed for port e927db6d-5558-4472-a05c-256404ce7a0e, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 915.241922] env[62096]: DEBUG oslo_concurrency.lockutils [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 21.046s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 915.241922] env[62096]: DEBUG nova.objects.instance [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] [instance: 5627f913-29d2-476e-8fde-8ea457cc56f2] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62096) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 915.241922] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Expecting reply to msg 3ee2b6b20aa640c996a9aaaec8ba7af9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 915.243589] env[62096]: DEBUG nova.compute.manager [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] Build of instance ce80b353-2f7c-4165-b4fa-b81a1e539295 was re-scheduled: Binding failed for port e927db6d-5558-4472-a05c-256404ce7a0e, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 915.244262] env[62096]: DEBUG nova.compute.manager [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 915.244613] env[62096]: DEBUG oslo_concurrency.lockutils [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Acquiring lock "refresh_cache-ce80b353-2f7c-4165-b4fa-b81a1e539295" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 915.244892] env[62096]: DEBUG oslo_concurrency.lockutils [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Acquired lock "refresh_cache-ce80b353-2f7c-4165-b4fa-b81a1e539295" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.245196] env[62096]: DEBUG nova.network.neutron [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 915.245684] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Expecting reply to msg ca2bce4318e44e48bec02ee4c29d5b40 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 915.255266] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ca2bce4318e44e48bec02ee4c29d5b40 [ 915.287879] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ee2b6b20aa640c996a9aaaec8ba7af9 [ 915.375294] env[62096]: DEBUG nova.compute.manager [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 915.377393] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 23d9705ecdf94f7ba9f30379506a6d5d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 915.409318] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 23d9705ecdf94f7ba9f30379506a6d5d [ 915.482009] env[62096]: DEBUG nova.compute.manager [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 8802af61-3692-4e27-85f4-945afccff231] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 915.496819] env[62096]: DEBUG nova.network.neutron [req-c3f63af1-b333-43a9-aa1b-f70c3ebb7f77 req-26b418f0-d467-4cf7-98d0-317d29950a2a service nova] [instance: 8802af61-3692-4e27-85f4-945afccff231] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 915.507495] env[62096]: DEBUG nova.virt.hardware [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 915.508158] env[62096]: DEBUG nova.virt.hardware [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 915.508491] env[62096]: DEBUG nova.virt.hardware [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 915.508823] env[62096]: DEBUG nova.virt.hardware [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 915.509084] env[62096]: DEBUG nova.virt.hardware [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 915.509343] env[62096]: DEBUG nova.virt.hardware [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 915.509714] env[62096]: DEBUG nova.virt.hardware [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 915.509977] env[62096]: 
DEBUG nova.virt.hardware [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 915.510275] env[62096]: DEBUG nova.virt.hardware [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 915.510691] env[62096]: DEBUG nova.virt.hardware [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 915.511000] env[62096]: DEBUG nova.virt.hardware [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 915.512036] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee7ca1bc-3acf-43ec-ae25-45da661ab033 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.523058] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-becd8914-a1f4-4d8e-9f1f-9ab08fa5b0b8 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.535876] env[62096]: ERROR nova.compute.manager [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 8802af61-3692-4e27-85f4-945afccff231] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 3e05661d-2e87-4072-aba5-687246e50404, please check neutron logs for more information. 
[ 915.535876] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] Traceback (most recent call last): [ 915.535876] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 915.535876] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] yield resources [ 915.535876] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 915.535876] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] self.driver.spawn(context, instance, image_meta, [ 915.535876] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 915.535876] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] self._vmops.spawn(context, instance, image_meta, injected_files, [ 915.535876] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 915.535876] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] vm_ref = self.build_virtual_machine(instance, [ 915.535876] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 915.536263] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] vif_infos = vmwarevif.get_vif_info(self._session, [ 915.536263] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 915.536263] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] for vif in network_info: [ 915.536263] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 915.536263] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] return self._sync_wrapper(fn, *args, **kwargs) [ 915.536263] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 915.536263] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] self.wait() [ 915.536263] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 915.536263] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] self[:] = self._gt.wait() [ 915.536263] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 915.536263] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] return self._exit_event.wait() [ 915.536263] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 915.536263] env[62096]: ERROR 
nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] current.throw(*self._exc) [ 915.536668] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 915.536668] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] result = function(*args, **kwargs) [ 915.536668] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 915.536668] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] return func(*args, **kwargs) [ 915.536668] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 915.536668] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] raise e [ 915.536668] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 915.536668] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] nwinfo = self.network_api.allocate_for_instance( [ 915.536668] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 915.536668] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] created_port_ids = self._update_ports_for_instance( [ 915.536668] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 915.536668] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] with excutils.save_and_reraise_exception(): [ 915.536668] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 915.537014] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] self.force_reraise() [ 915.537014] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 915.537014] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] raise self.value [ 915.537014] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 915.537014] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] updated_port = self._update_port( [ 915.537014] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 915.537014] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] _ensure_no_port_binding_failure(port) [ 915.537014] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
915.537014] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] raise exception.PortBindingFailed(port_id=port['id']) [ 915.537014] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] nova.exception.PortBindingFailed: Binding failed for port 3e05661d-2e87-4072-aba5-687246e50404, please check neutron logs for more information. [ 915.537014] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] [ 915.537014] env[62096]: INFO nova.compute.manager [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 8802af61-3692-4e27-85f4-945afccff231] Terminating instance [ 915.540613] env[62096]: DEBUG oslo_concurrency.lockutils [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Acquiring lock "refresh_cache-8802af61-3692-4e27-85f4-945afccff231" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 915.576528] env[62096]: DEBUG nova.network.neutron [req-c3f63af1-b333-43a9-aa1b-f70c3ebb7f77 req-26b418f0-d467-4cf7-98d0-317d29950a2a service nova] [instance: 8802af61-3692-4e27-85f4-945afccff231] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 915.577366] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-c3f63af1-b333-43a9-aa1b-f70c3ebb7f77 req-26b418f0-d467-4cf7-98d0-317d29950a2a service nova] Expecting reply to msg 45ebfa39d9764b09890dd73428b5fa9f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 915.585504] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 45ebfa39d9764b09890dd73428b5fa9f [ 915.745982] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Expecting reply to msg 939b44009efa4c778700baa8e77d3472 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 915.756628] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 939b44009efa4c778700baa8e77d3472 [ 915.768571] env[62096]: DEBUG nova.network.neutron [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 915.839480] env[62096]: DEBUG nova.network.neutron [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 915.839875] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Expecting reply to msg 92e84a6f1e704953913fd0aaf0a8ed8a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 915.848728] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 92e84a6f1e704953913fd0aaf0a8ed8a [ 915.897479] env[62096]: DEBUG oslo_concurrency.lockutils [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.079470] env[62096]: DEBUG oslo_concurrency.lockutils [req-c3f63af1-b333-43a9-aa1b-f70c3ebb7f77 req-26b418f0-d467-4cf7-98d0-317d29950a2a service nova] Releasing lock "refresh_cache-8802af61-3692-4e27-85f4-945afccff231" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 916.079913] env[62096]: DEBUG oslo_concurrency.lockutils [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Acquired lock "refresh_cache-8802af61-3692-4e27-85f4-945afccff231" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.080123] env[62096]: DEBUG nova.network.neutron [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 8802af61-3692-4e27-85f4-945afccff231] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 916.080565] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg 921c1d796765427e8a06f643a26db76b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 916.115330] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 921c1d796765427e8a06f643a26db76b [ 916.252683] env[62096]: DEBUG oslo_concurrency.lockutils [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 916.253075] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-85f59f7f-6abf-41c4-87c1-d5831c2c0f52 tempest-ServersAdmin275Test-238925842 tempest-ServersAdmin275Test-238925842-project-admin] Expecting reply to msg 
ae2cd1a13a0749b2b53ac42b4aaf95e6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 916.253954] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 21.724s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 916.255733] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Expecting reply to msg f9fd01fec7cb49cb867577272263afbf in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 916.267361] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ae2cd1a13a0749b2b53ac42b4aaf95e6 [ 916.296037] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f9fd01fec7cb49cb867577272263afbf [ 916.341903] env[62096]: DEBUG oslo_concurrency.lockutils [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Releasing lock "refresh_cache-ce80b353-2f7c-4165-b4fa-b81a1e539295" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 916.342145] env[62096]: DEBUG nova.compute.manager [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 916.342329] env[62096]: DEBUG nova.compute.manager [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 916.342498] env[62096]: DEBUG nova.network.neutron [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 916.358437] env[62096]: DEBUG nova.network.neutron [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 916.359081] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Expecting reply to msg 7d07ca2102a54997a4dff5ac54498dad in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 916.366561] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7d07ca2102a54997a4dff5ac54498dad [ 916.597363] env[62096]: DEBUG nova.network.neutron [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 8802af61-3692-4e27-85f4-945afccff231] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 916.668606] env[62096]: DEBUG nova.network.neutron [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 8802af61-3692-4e27-85f4-945afccff231] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 916.669180] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg b558df74413c4f6b9b214df02ec00f75 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 916.677172] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b558df74413c4f6b9b214df02ec00f75 [ 916.860732] env[62096]: DEBUG nova.network.neutron [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 916.861235] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Expecting reply to msg f75b2c6c68cd4543a9c788f4d6c56002 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 916.870061] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f75b2c6c68cd4543a9c788f4d6c56002 [ 916.909021] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58abb969-86e1-42aa-a94a-8dc3104d7c39 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.917240] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b45a66c-9de4-453c-b9b4-6d16e076f5cc {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.946176] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09ae3688-4183-4a5c-a030-eff15ccc26b8 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.953073] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-f3e7724b-c1b3-42f2-8031-b4d3b942b0ec {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.967051] env[62096]: DEBUG nova.compute.provider_tree [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 916.967583] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Expecting reply to msg aa484defbf9644809f17cc33327d9b86 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 916.975737] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aa484defbf9644809f17cc33327d9b86 [ 917.010277] env[62096]: DEBUG nova.compute.manager [req-a63230c6-cba8-474a-870c-dfbf2e876134 req-6cd4d75c-b6c7-48c5-80a4-906341db61fc service nova] [instance: 8802af61-3692-4e27-85f4-945afccff231] Received event network-vif-deleted-3e05661d-2e87-4072-aba5-687246e50404 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 917.171048] env[62096]: DEBUG oslo_concurrency.lockutils [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Releasing lock "refresh_cache-8802af61-3692-4e27-85f4-945afccff231" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 917.171484] env[62096]: DEBUG nova.compute.manager [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 8802af61-3692-4e27-85f4-945afccff231] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 917.171674] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 8802af61-3692-4e27-85f4-945afccff231] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 917.172039] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0931b438-ea6a-4b4d-8f21-5c7c2a325100 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.181663] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3913b998-e8db-44ae-9af2-e82f44edcdfd {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.202731] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 8802af61-3692-4e27-85f4-945afccff231] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8802af61-3692-4e27-85f4-945afccff231 could not be found. 
[ 917.202974] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 8802af61-3692-4e27-85f4-945afccff231] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 917.203928] env[62096]: INFO nova.compute.manager [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 8802af61-3692-4e27-85f4-945afccff231] Took 0.03 seconds to destroy the instance on the hypervisor. [ 917.204209] env[62096]: DEBUG oslo.service.loopingcall [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 917.204488] env[62096]: DEBUG nova.compute.manager [-] [instance: 8802af61-3692-4e27-85f4-945afccff231] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 917.204581] env[62096]: DEBUG nova.network.neutron [-] [instance: 8802af61-3692-4e27-85f4-945afccff231] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 917.219061] env[62096]: DEBUG nova.network.neutron [-] [instance: 8802af61-3692-4e27-85f4-945afccff231] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 917.219559] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 66f51ed6f3cc43f69672b7e223551129 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 917.226548] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 66f51ed6f3cc43f69672b7e223551129 [ 917.366754] env[62096]: INFO nova.compute.manager [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] [instance: ce80b353-2f7c-4165-b4fa-b81a1e539295] Took 1.02 seconds to deallocate network for instance. 
[ 917.367630] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Expecting reply to msg 633822c6e4d5493f83a1ad70ef3156c0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 917.416187] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 633822c6e4d5493f83a1ad70ef3156c0 [ 917.470418] env[62096]: DEBUG nova.scheduler.client.report [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 917.472813] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Expecting reply to msg cd2b012ab964431c81dd4c486dfa78f2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 917.487212] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cd2b012ab964431c81dd4c486dfa78f2 [ 917.722099] env[62096]: DEBUG nova.network.neutron [-] [instance: 8802af61-3692-4e27-85f4-945afccff231] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.722568] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 955cbcdaa6bf4b149cf10c6f2148cb8c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 917.730475] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 955cbcdaa6bf4b149cf10c6f2148cb8c [ 917.872408] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Expecting reply to msg 45e977b3e3e64c4eafe1080dc00a9bbf in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 917.902507] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 45e977b3e3e64c4eafe1080dc00a9bbf [ 917.975811] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.722s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 917.976491] env[62096]: ERROR nova.compute.manager [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 6c4e23f6-89de-4e27-b743-db4133b589d4, please check neutron logs for more information. 
[ 917.976491] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] Traceback (most recent call last): [ 917.976491] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 917.976491] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] self.driver.spawn(context, instance, image_meta, [ 917.976491] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 917.976491] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] self._vmops.spawn(context, instance, image_meta, injected_files, [ 917.976491] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 917.976491] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] vm_ref = self.build_virtual_machine(instance, [ 917.976491] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 917.976491] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] vif_infos = vmwarevif.get_vif_info(self._session, [ 917.976491] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 917.976893] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] for vif in network_info: [ 917.976893] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 917.976893] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] return self._sync_wrapper(fn, *args, **kwargs) [ 917.976893] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 917.976893] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] self.wait() [ 917.976893] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 917.976893] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] self[:] = self._gt.wait() [ 917.976893] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 917.976893] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] return self._exit_event.wait() [ 917.976893] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 917.976893] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] current.throw(*self._exc) [ 917.976893] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
917.976893] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] result = function(*args, **kwargs) [ 917.977289] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 917.977289] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] return func(*args, **kwargs) [ 917.977289] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 917.977289] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] raise e [ 917.977289] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 917.977289] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] nwinfo = self.network_api.allocate_for_instance( [ 917.977289] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 917.977289] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] created_port_ids = self._update_ports_for_instance( [ 917.977289] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 917.977289] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] with excutils.save_and_reraise_exception(): [ 917.977289] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 917.977289] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] self.force_reraise() [ 917.977289] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 917.977685] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] raise self.value [ 917.977685] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 917.977685] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] updated_port = self._update_port( [ 917.977685] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 917.977685] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] _ensure_no_port_binding_failure(port) [ 917.977685] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 917.977685] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] raise exception.PortBindingFailed(port_id=port['id']) [ 917.977685] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] nova.exception.PortBindingFailed: Binding failed for 
port 6c4e23f6-89de-4e27-b743-db4133b589d4, please check neutron logs for more information. [ 917.977685] env[62096]: ERROR nova.compute.manager [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] [ 917.977685] env[62096]: DEBUG nova.compute.utils [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] Binding failed for port 6c4e23f6-89de-4e27-b743-db4133b589d4, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 917.978714] env[62096]: DEBUG oslo_concurrency.lockutils [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.177s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 917.980102] env[62096]: INFO nova.compute.claims [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 917.981790] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 7b6619344ee2404da6ab55da4c905980 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 917.982983] env[62096]: DEBUG nova.compute.manager [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] Build of instance f3f90842-edaa-42b0-9b21-25a952fc8288 was re-scheduled: Binding failed for port 6c4e23f6-89de-4e27-b743-db4133b589d4, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 917.983398] env[62096]: DEBUG nova.compute.manager [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 917.983621] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Acquiring lock "refresh_cache-f3f90842-edaa-42b0-9b21-25a952fc8288" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 917.983769] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Acquired lock "refresh_cache-f3f90842-edaa-42b0-9b21-25a952fc8288" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.983926] env[62096]: DEBUG nova.network.neutron [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 917.984331] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Expecting reply to msg 29d5888098bf4b1ea1181cb21b9756c6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 917.993912] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 29d5888098bf4b1ea1181cb21b9756c6 [ 918.026915] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7b6619344ee2404da6ab55da4c905980 [ 918.224708] env[62096]: INFO nova.compute.manager [-] [instance: 8802af61-3692-4e27-85f4-945afccff231] Took 1.02 seconds to deallocate network for instance. 
[ 918.227076] env[62096]: DEBUG nova.compute.claims [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 8802af61-3692-4e27-85f4-945afccff231] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 918.227269] env[62096]: DEBUG oslo_concurrency.lockutils [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 918.398920] env[62096]: INFO nova.scheduler.client.report [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Deleted allocations for instance ce80b353-2f7c-4165-b4fa-b81a1e539295 [ 918.403738] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Expecting reply to msg e6638bf28ec640698098adf71332a180 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 918.417094] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e6638bf28ec640698098adf71332a180 [ 918.487272] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 1bb75296e87542b7abe7c85735db2e6e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 918.494468] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1bb75296e87542b7abe7c85735db2e6e [ 918.503218] env[62096]: DEBUG nova.network.neutron [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 918.702932] env[62096]: DEBUG nova.network.neutron [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 918.703381] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Expecting reply to msg 29214b1dabbe44f48e9b6925a4405c04 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 918.712059] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 29214b1dabbe44f48e9b6925a4405c04 [ 918.905906] env[62096]: DEBUG oslo_concurrency.lockutils [None req-66bcc827-9e2c-4570-a904-739ded1c169f tempest-AttachVolumeTestJSON-1709933096 tempest-AttachVolumeTestJSON-1709933096-project-member] Lock "ce80b353-2f7c-4165-b4fa-b81a1e539295" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 92.593s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 918.906490] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg c00a2f15a2014ae597edba9d31260fd8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 918.918815] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c00a2f15a2014ae597edba9d31260fd8 [ 919.130467] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a07fa523-becf-44a2-8da4-ab777a4c07da {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.138514] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f929ccb8-a3f9-47b5-b51c-c4162bdc5bc1 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.169692] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-304713a6-d123-44b5-9251-6105267bae82 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.176765] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a064c732-eb42-4779-9acd-0a975434cf94 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.189819] env[62096]: DEBUG nova.compute.provider_tree [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 919.190311] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 
77902917278b4daba0bfcadd16437959 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 919.199672] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 77902917278b4daba0bfcadd16437959 [ 919.205942] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Releasing lock "refresh_cache-f3f90842-edaa-42b0-9b21-25a952fc8288" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 919.206140] env[62096]: DEBUG nova.compute.manager [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 919.206320] env[62096]: DEBUG nova.compute.manager [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 919.206482] env[62096]: DEBUG nova.network.neutron [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 919.223640] env[62096]: DEBUG nova.network.neutron [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 919.224178] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Expecting reply to msg 45ca8c5d5a7e444bae62a6bcefb5a2f8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 919.230486] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 45ca8c5d5a7e444bae62a6bcefb5a2f8 [ 919.409024] env[62096]: DEBUG nova.compute.manager [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] Starting instance... 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 919.410757] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg fde79b475e434be2b3872a3d4ea068aa in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 919.455350] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fde79b475e434be2b3872a3d4ea068aa [ 919.693173] env[62096]: DEBUG nova.scheduler.client.report [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 919.695661] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg d58c20a108304ca5b385a3dd92284c45 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 919.708696] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d58c20a108304ca5b385a3dd92284c45 [ 919.725687] env[62096]: DEBUG nova.network.neutron [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 919.726223] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Expecting reply to msg 5ec32fcb685849cb9458b032d8271237 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 919.734599] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ec32fcb685849cb9458b032d8271237 [ 919.930828] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 920.198231] env[62096]: DEBUG oslo_concurrency.lockutils [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.219s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 920.198837] env[62096]: DEBUG nova.compute.manager [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 
tempest-DeleteServersTestJSON-654204352-project-member] [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] Start building networks asynchronously for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 920.200522] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg af4c390946e14f5f920313a69a33925e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 920.201650] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.217s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 920.201820] env[62096]: DEBUG nova.objects.instance [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Lazy-loading 'resources' on Instance uuid 5627f913-29d2-476e-8fde-8ea457cc56f2 {{(pid=62096) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 920.202130] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Expecting reply to msg 6dcebaf83cef4fea902b807bb0156068 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 920.208761] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6dcebaf83cef4fea902b807bb0156068 [ 920.228196] env[62096]: INFO nova.compute.manager [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] [instance: f3f90842-edaa-42b0-9b21-25a952fc8288] Took 1.02 seconds to deallocate network for instance. 
[ 920.230025] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Expecting reply to msg 461f5958dbca420782b23943368d36e6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 920.234622] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg af4c390946e14f5f920313a69a33925e [ 920.267516] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 461f5958dbca420782b23943368d36e6 [ 920.704878] env[62096]: DEBUG nova.compute.utils [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 920.705517] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 3d8de419038d4ed4a6f7c0484f723566 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 920.713684] env[62096]: DEBUG nova.compute.manager [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 920.713684] env[62096]: DEBUG nova.network.neutron [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 920.715599] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3d8de419038d4ed4a6f7c0484f723566 [ 920.734809] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Expecting reply to msg 6b23df9e6f474fff9073f26dbfe930ea in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 920.776646] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6b23df9e6f474fff9073f26dbfe930ea [ 920.783231] env[62096]: DEBUG nova.policy [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '491a2069427f43e79347a70e475e4dd2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e45573130e8e4ce4979b37e1b4c5af9f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 920.908983] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e86b0fa-e088-414b-bc2f-13a9e97622b6 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.916854] env[62096]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-953f3948-10c4-4942-b0d2-9c9ba1c3eb60 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.947794] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccee3541-8de2-4dcc-8d81-097e11c35b0c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.955029] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6f64fc4-0cad-497b-9c8a-41654ad6047a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.967931] env[62096]: DEBUG nova.compute.provider_tree [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 920.968940] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Expecting reply to msg b720197093ea4551ad3192d2ef071352 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 920.976249] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b720197093ea4551ad3192d2ef071352 [ 921.038525] env[62096]: DEBUG nova.network.neutron [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] Successfully created port: 809ee1cb-7807-4bd3-b19d-13bbe996c738 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 921.210472] env[62096]: DEBUG nova.compute.manager [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 921.212210] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 55bb10c58d15407f8975cc40b6463e89 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 921.251367] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 55bb10c58d15407f8975cc40b6463e89 [ 921.260922] env[62096]: INFO nova.scheduler.client.report [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Deleted allocations for instance f3f90842-edaa-42b0-9b21-25a952fc8288 [ 921.268724] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Expecting reply to msg 177cf18c55b242a886f588b79ac40b89 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 921.283379] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 177cf18c55b242a886f588b79ac40b89 [ 921.477344] env[62096]: DEBUG nova.scheduler.client.report [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 921.479696] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Expecting reply to msg 17f8b4a000e3445ebd7e08d595265dea in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 921.491786] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 17f8b4a000e3445ebd7e08d595265dea [ 921.719758] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg e57c333d4813470aa19baebf6eae3eb9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 921.756733] env[62096]: DEBUG nova.compute.manager [req-aad54da7-2f62-489a-b32d-ae6acf4c3e01 req-edd3b3e3-be70-4b80-a4be-40d0817d8415 service nova] [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] Received event network-changed-809ee1cb-7807-4bd3-b19d-13bbe996c738 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 921.756938] env[62096]: DEBUG nova.compute.manager [req-aad54da7-2f62-489a-b32d-ae6acf4c3e01 req-edd3b3e3-be70-4b80-a4be-40d0817d8415 service nova] [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] Refreshing instance network info cache due to event network-changed-809ee1cb-7807-4bd3-b19d-13bbe996c738. 
{{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 921.757148] env[62096]: DEBUG oslo_concurrency.lockutils [req-aad54da7-2f62-489a-b32d-ae6acf4c3e01 req-edd3b3e3-be70-4b80-a4be-40d0817d8415 service nova] Acquiring lock "refresh_cache-0f275603-acfc-43db-8a71-a17af8e837b4" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 921.757288] env[62096]: DEBUG oslo_concurrency.lockutils [req-aad54da7-2f62-489a-b32d-ae6acf4c3e01 req-edd3b3e3-be70-4b80-a4be-40d0817d8415 service nova] Acquired lock "refresh_cache-0f275603-acfc-43db-8a71-a17af8e837b4" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.757449] env[62096]: DEBUG nova.network.neutron [req-aad54da7-2f62-489a-b32d-ae6acf4c3e01 req-edd3b3e3-be70-4b80-a4be-40d0817d8415 service nova] [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] Refreshing network info cache for port 809ee1cb-7807-4bd3-b19d-13bbe996c738 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 921.757869] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-aad54da7-2f62-489a-b32d-ae6acf4c3e01 req-edd3b3e3-be70-4b80-a4be-40d0817d8415 service nova] Expecting reply to msg d9fafbfe297a401284bb77b324b8e354 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 921.760628] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e57c333d4813470aa19baebf6eae3eb9 [ 921.764665] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d9fafbfe297a401284bb77b324b8e354 [ 921.768045] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d8bc9aca-755b-4171-8a37-b8947b203c4b tempest-ServerPasswordTestJSON-1799855642 tempest-ServerPasswordTestJSON-1799855642-project-member] Lock "f3f90842-edaa-42b0-9b21-25a952fc8288" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 90.363s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 921.967473] env[62096]: ERROR nova.compute.manager [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 809ee1cb-7807-4bd3-b19d-13bbe996c738, please check neutron logs for more information. 
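The lock traffic in this stretch ('Acquiring lock "refresh_cache-0f275603-..."', 'Acquired lock ...', and the release of the per-instance build lock after 90.363s) is ordinary oslo.concurrency output: the context-manager form logs Acquiring/Acquired/Releasing, and the decorator form logs the waited/held timings. A minimal sketch of the two primitives that produce lines of this shape, with invented function names purely for illustration (the DEBUG lines only show up when debug logging is enabled):

    from oslo_concurrency import lockutils

    # Decorator form: the wrapper logs 'acquired ... :: waited Ns' and
    # 'released ... :: held Ns' around the call, as seen for the
    # "compute_resources" and per-instance locks in this log.
    @lockutils.synchronized('compute_resources')
    def claim_resources():
        pass  # critical section guarded by an in-process lock

    # Context-manager form: logs 'Acquiring lock "..."' / 'Acquired lock "..."'
    # / 'Releasing lock "..."', matching the refresh_cache-<uuid> lines.
    def refresh_network_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # rebuild the instance network info cache here

    claim_resources()
    refresh_network_cache('0f275603-acfc-43db-8a71-a17af8e837b4')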
[ 921.967473] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 921.967473] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 921.967473] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 921.967473] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 921.967473] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 921.967473] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 921.967473] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 921.967473] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 921.967473] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 921.967473] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 921.967473] env[62096]: ERROR nova.compute.manager raise self.value [ 921.967473] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 921.967473] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 921.967473] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 921.967473] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 921.968167] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 921.968167] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 921.968167] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 809ee1cb-7807-4bd3-b19d-13bbe996c738, please check neutron logs for more information. 
[ 921.968167] env[62096]: ERROR nova.compute.manager [ 921.968167] env[62096]: Traceback (most recent call last): [ 921.968167] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 921.968167] env[62096]: listener.cb(fileno) [ 921.968167] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 921.968167] env[62096]: result = function(*args, **kwargs) [ 921.968167] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 921.968167] env[62096]: return func(*args, **kwargs) [ 921.968167] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 921.968167] env[62096]: raise e [ 921.968167] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 921.968167] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 921.968167] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 921.968167] env[62096]: created_port_ids = self._update_ports_for_instance( [ 921.968167] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 921.968167] env[62096]: with excutils.save_and_reraise_exception(): [ 921.968167] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 921.968167] env[62096]: self.force_reraise() [ 921.968167] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 921.968167] env[62096]: raise self.value [ 921.968167] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 921.968167] env[62096]: updated_port = self._update_port( [ 921.968167] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 921.968167] env[62096]: _ensure_no_port_binding_failure(port) [ 921.968167] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 921.968167] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 921.969001] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 809ee1cb-7807-4bd3-b19d-13bbe996c738, please check neutron logs for more information. 
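The traceback above bottoms out in nova/network/neutron.py in _ensure_no_port_binding_failure (line 294 in this tree): Neutron accepted the port create but could not bind it to the compute host, which it signals by returning the port with binding:vif_type set to "binding_failed", and Nova converts that into PortBindingFailed. In rough outline, reconstructed from the log rather than copied from the Nova source, the check is of this shape:

    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                'Binding failed for port %s, please check neutron logs '
                'for more information.' % port_id)

    def _ensure_no_port_binding_failure(port):
        # Neutron reports an unbindable port via binding:vif_type; fail
        # the build fast instead of plugging a dead VIF.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    try:
        _ensure_no_port_binding_failure({
            'id': '809ee1cb-7807-4bd3-b19d-13bbe996c738',
            'binding:vif_type': VIF_TYPE_BINDING_FAILED,
        })
    except PortBindingFailed as exc:
        print(exc)

Typical causes are on the Neutron side (no mechanism driver could bind the port on this host: agent down, missing bridge or physnet mapping), which is why the message points at the neutron logs for this port ID.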
[ 921.969001] env[62096]: Removing descriptor: 14 [ 921.988735] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.781s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 921.988735] env[62096]: DEBUG oslo_concurrency.lockutils [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.863s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 921.988735] env[62096]: INFO nova.compute.claims [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 921.988735] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg c59c52bc14024a6d919c7bf69c36ff0c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 922.009127] env[62096]: INFO nova.scheduler.client.report [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Deleted allocations for instance 5627f913-29d2-476e-8fde-8ea457cc56f2 [ 922.011763] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Expecting reply to msg a97ddc2e329848458a47eb6322e3e6ff in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 922.068702] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c59c52bc14024a6d919c7bf69c36ff0c [ 922.082183] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a97ddc2e329848458a47eb6322e3e6ff [ 922.223026] env[62096]: DEBUG nova.compute.manager [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 922.249051] env[62096]: DEBUG nova.virt.hardware [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 922.249284] env[62096]: DEBUG nova.virt.hardware [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 922.249434] env[62096]: DEBUG nova.virt.hardware [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 922.249601] env[62096]: DEBUG nova.virt.hardware [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 922.249738] env[62096]: DEBUG nova.virt.hardware [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 922.249876] env[62096]: DEBUG nova.virt.hardware [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 922.250092] env[62096]: DEBUG nova.virt.hardware [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 922.250221] env[62096]: DEBUG nova.virt.hardware [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 922.250375] env[62096]: DEBUG nova.virt.hardware [None 
req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 922.250585] env[62096]: DEBUG nova.virt.hardware [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 922.250715] env[62096]: DEBUG nova.virt.hardware [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 922.251547] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cf63d7a-c3bf-4636-9041-f5d6c9d2ef62 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.259409] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f320b5fc-bc1d-4365-9b97-361cd67d67fe {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.275732] env[62096]: ERROR nova.compute.manager [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 809ee1cb-7807-4bd3-b19d-13bbe996c738, please check neutron logs for more information. 
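The spawn failure above replays the PortBindingFailed that the background network allocation already hit: allocation runs asynchronously while block devices are prepared, and the VMware driver only consumes network_info when it iterates the VIFs in get_vif_info, at which point the wrapper's wait() re-raises the stored exception (that is what the _sync_wrapper and wait frames in the traceback that follows show). A minimal stand-in for that deferred-failure pattern, using concurrent.futures instead of Nova's eventlet-based wrapper; class and function names here are illustrative:

    from concurrent.futures import ThreadPoolExecutor

    class PortBindingFailed(Exception):
        pass

    def allocate_for_instance():
        # Stands in for the background allocation that failed above.
        raise PortBindingFailed('Binding failed for port 809ee1cb-...')

    class LazyNetworkInfo:
        """Holds a future; consuming it re-raises any background error."""
        def __init__(self, future):
            self._future = future

        def __iter__(self):
            # .result() re-raises the worker's exception, which is why the
            # spawn traceback repeats the allocation frames.
            return iter(self._future.result())

    with ThreadPoolExecutor(max_workers=1) as pool:
        network_info = LazyNetworkInfo(pool.submit(allocate_for_instance))
        try:
            for vif in network_info:   # first consumption, during "spawn"
                pass
        except PortBindingFailed as exc:
            print('spawn failed:', exc)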
[ 922.275732] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] Traceback (most recent call last): [ 922.275732] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 922.275732] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] yield resources [ 922.275732] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 922.275732] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] self.driver.spawn(context, instance, image_meta, [ 922.275732] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 922.275732] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 922.275732] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 922.275732] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] vm_ref = self.build_virtual_machine(instance, [ 922.275732] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 922.276212] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] vif_infos = vmwarevif.get_vif_info(self._session, [ 922.276212] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 922.276212] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] for vif in network_info: [ 922.276212] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 922.276212] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] return self._sync_wrapper(fn, *args, **kwargs) [ 922.276212] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 922.276212] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] self.wait() [ 922.276212] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 922.276212] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] self[:] = self._gt.wait() [ 922.276212] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 922.276212] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] return self._exit_event.wait() [ 922.276212] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 922.276212] env[62096]: ERROR 
nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] current.throw(*self._exc) [ 922.276641] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 922.276641] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] result = function(*args, **kwargs) [ 922.276641] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 922.276641] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] return func(*args, **kwargs) [ 922.276641] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 922.276641] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] raise e [ 922.276641] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 922.276641] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] nwinfo = self.network_api.allocate_for_instance( [ 922.276641] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 922.276641] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] created_port_ids = self._update_ports_for_instance( [ 922.276641] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 922.276641] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] with excutils.save_and_reraise_exception(): [ 922.276641] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 922.277042] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] self.force_reraise() [ 922.277042] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 922.277042] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] raise self.value [ 922.277042] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 922.277042] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] updated_port = self._update_port( [ 922.277042] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 922.277042] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] _ensure_no_port_binding_failure(port) [ 922.277042] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
922.277042] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] raise exception.PortBindingFailed(port_id=port['id']) [ 922.277042] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] nova.exception.PortBindingFailed: Binding failed for port 809ee1cb-7807-4bd3-b19d-13bbe996c738, please check neutron logs for more information. [ 922.277042] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] [ 922.277042] env[62096]: INFO nova.compute.manager [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] Terminating instance [ 922.277884] env[62096]: DEBUG oslo_concurrency.lockutils [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Acquiring lock "refresh_cache-0f275603-acfc-43db-8a71-a17af8e837b4" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 922.282510] env[62096]: DEBUG nova.network.neutron [req-aad54da7-2f62-489a-b32d-ae6acf4c3e01 req-edd3b3e3-be70-4b80-a4be-40d0817d8415 service nova] [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 922.397831] env[62096]: DEBUG nova.network.neutron [req-aad54da7-2f62-489a-b32d-ae6acf4c3e01 req-edd3b3e3-be70-4b80-a4be-40d0817d8415 service nova] [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 922.397983] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-aad54da7-2f62-489a-b32d-ae6acf4c3e01 req-edd3b3e3-be70-4b80-a4be-40d0817d8415 service nova] Expecting reply to msg 7bb3bf1e42fc4c3daddc34b2a264888e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 922.408080] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7bb3bf1e42fc4c3daddc34b2a264888e [ 922.491665] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 530829caa77c4be188a24535d5c66620 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 922.499746] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 530829caa77c4be188a24535d5c66620 [ 922.516370] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Lock "5627f913-29d2-476e-8fde-8ea457cc56f2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.212s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 922.516980] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4398236f-4ae2-4ce8-81b8-15e3e6e18674 tempest-ServersAdmin275Test-1216799344 tempest-ServersAdmin275Test-1216799344-project-member] Expecting reply to msg d4085a1ce5b94a1d8051e493f11ce433 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 922.530148] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d4085a1ce5b94a1d8051e493f11ce433 [ 
922.900114] env[62096]: DEBUG oslo_concurrency.lockutils [req-aad54da7-2f62-489a-b32d-ae6acf4c3e01 req-edd3b3e3-be70-4b80-a4be-40d0817d8415 service nova] Releasing lock "refresh_cache-0f275603-acfc-43db-8a71-a17af8e837b4" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 922.900635] env[62096]: DEBUG oslo_concurrency.lockutils [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Acquired lock "refresh_cache-0f275603-acfc-43db-8a71-a17af8e837b4" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.900830] env[62096]: DEBUG nova.network.neutron [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 922.901276] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 2577645c2a0f4130860fb1335001a56a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 922.908049] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2577645c2a0f4130860fb1335001a56a [ 923.123266] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87c3949a-be2a-453f-89a1-b407608f035f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.129469] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e27236a0-0000-469e-959e-ff1c50c7f268 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.165854] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45abb148-520d-46da-938d-168b53bcecb1 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.175258] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-169e59eb-7591-4946-9442-3fa3355fcd32 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.194807] env[62096]: DEBUG nova.compute.provider_tree [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 923.195470] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 374ec1fdb9614141808700e4035cb612 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 923.203894] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 374ec1fdb9614141808700e4035cb612 [ 923.421761] env[62096]: DEBUG nova.network.neutron [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 
tempest-DeleteServersTestJSON-654204352-project-member] [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 923.514760] env[62096]: DEBUG nova.network.neutron [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 923.515345] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 5c26dd9fde25439da367740fe0023ac7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 923.527118] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5c26dd9fde25439da367740fe0023ac7 [ 923.697725] env[62096]: DEBUG nova.scheduler.client.report [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 923.700105] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 736bed0371eb416e956a19ae21fd518b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 923.711374] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 736bed0371eb416e956a19ae21fd518b [ 923.788803] env[62096]: DEBUG nova.compute.manager [req-5307127c-bccb-4fed-914f-2c10ba814ee1 req-b405ecf2-b827-4cba-8941-2151ee69830c service nova] [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] Received event network-vif-deleted-809ee1cb-7807-4bd3-b19d-13bbe996c738 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 924.021714] env[62096]: DEBUG oslo_concurrency.lockutils [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Releasing lock "refresh_cache-0f275603-acfc-43db-8a71-a17af8e837b4" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 924.022212] env[62096]: DEBUG nova.compute.manager [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 924.022428] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 924.022740] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-72385a66-5d88-43ca-a67b-617c45ee78ae {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.033438] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-382f2781-e07f-4314-a187-3278a2f74c2f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.054501] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0f275603-acfc-43db-8a71-a17af8e837b4 could not be found. [ 924.054690] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 924.054860] env[62096]: INFO nova.compute.manager [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] Took 0.03 seconds to destroy the instance on the hypervisor. [ 924.055093] env[62096]: DEBUG oslo.service.loopingcall [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 924.055302] env[62096]: DEBUG nova.compute.manager [-] [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 924.055390] env[62096]: DEBUG nova.network.neutron [-] [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 924.069197] env[62096]: DEBUG nova.network.neutron [-] [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 924.069645] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg bb96f6dc477548f6a449a9a7044e311d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 924.076430] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bb96f6dc477548f6a449a9a7044e311d [ 924.203189] env[62096]: DEBUG oslo_concurrency.lockutils [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.218s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 924.203988] env[62096]: DEBUG nova.compute.manager [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] Start building networks asynchronously for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 924.206999] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg e2240eed05544dceb7477c12a39096f4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 924.209280] env[62096]: DEBUG oslo_concurrency.lockutils [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.766s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 924.212087] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Expecting reply to msg 9e8840c8b88a4bf2b10ac8444b017603 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 924.246038] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e2240eed05544dceb7477c12a39096f4 [ 924.246630] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9e8840c8b88a4bf2b10ac8444b017603 [ 924.571636] env[62096]: DEBUG nova.network.neutron [-] [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 924.572186] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 053ac7bea23a4b60877a6efe87fd0857 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 924.580644] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 053ac7bea23a4b60877a6efe87fd0857 [ 924.710389] env[62096]: DEBUG nova.compute.utils [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 924.711041] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 
97dcb7bef9384e3ca164a825dc7a8f7c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 924.712337] env[62096]: DEBUG nova.compute.manager [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 924.712517] env[62096]: DEBUG nova.network.neutron [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 924.722442] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 97dcb7bef9384e3ca164a825dc7a8f7c [ 924.762720] env[62096]: DEBUG nova.policy [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '09a4673f033c4d139efe4cd9ba4b7560', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd81a5a37184d4b29ad6df7e77dfd3ee4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 924.851545] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36f3e007-c8ba-423b-8fc1-5fad37a7bb7c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.859867] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5106808d-1736-4615-8c1a-fce3008d1914 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.891631] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49a49861-41b9-4028-88b6-c5d2d1199920 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.899095] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b39c6435-7008-402d-8aff-9a1ed8966f1c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.913106] env[62096]: DEBUG nova.compute.provider_tree [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 924.913603] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Expecting reply to msg ca763f7d37604fb996e524bdf959f30f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 924.921064] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ca763f7d37604fb996e524bdf959f30f [ 
925.039625] env[62096]: DEBUG nova.network.neutron [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] Successfully created port: 77aaac28-df93-47dd-a289-96e93d2bc51d {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 925.076438] env[62096]: INFO nova.compute.manager [-] [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] Took 1.02 seconds to deallocate network for instance. [ 925.077391] env[62096]: DEBUG nova.compute.claims [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 925.077391] env[62096]: DEBUG oslo_concurrency.lockutils [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.216236] env[62096]: DEBUG nova.compute.manager [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] Start building block device mappings for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 925.218277] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg b970ef2c90f7406b92dd51b3fa5c1ff7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 925.265102] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b970ef2c90f7406b92dd51b3fa5c1ff7 [ 925.416754] env[62096]: DEBUG nova.scheduler.client.report [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 925.419462] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Expecting reply to msg f541b1cd581d488eb5054c3fedfa57f2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 925.432359] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f541b1cd581d488eb5054c3fedfa57f2 [ 925.731550] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 9358221606004d24b10d833f8f8076af in queue 
reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 925.761709] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9358221606004d24b10d833f8f8076af [ 925.816346] env[62096]: DEBUG nova.compute.manager [req-b3150067-55de-4d28-96a7-256ae2e2a53e req-a799ebf1-5071-450e-981e-037de66f36ac service nova] [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] Received event network-changed-77aaac28-df93-47dd-a289-96e93d2bc51d {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 925.816540] env[62096]: DEBUG nova.compute.manager [req-b3150067-55de-4d28-96a7-256ae2e2a53e req-a799ebf1-5071-450e-981e-037de66f36ac service nova] [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] Refreshing instance network info cache due to event network-changed-77aaac28-df93-47dd-a289-96e93d2bc51d. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 925.816842] env[62096]: DEBUG oslo_concurrency.lockutils [req-b3150067-55de-4d28-96a7-256ae2e2a53e req-a799ebf1-5071-450e-981e-037de66f36ac service nova] Acquiring lock "refresh_cache-2d42e5f7-b293-4133-b279-63a8320bc19d" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 925.817005] env[62096]: DEBUG oslo_concurrency.lockutils [req-b3150067-55de-4d28-96a7-256ae2e2a53e req-a799ebf1-5071-450e-981e-037de66f36ac service nova] Acquired lock "refresh_cache-2d42e5f7-b293-4133-b279-63a8320bc19d" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.817170] env[62096]: DEBUG nova.network.neutron [req-b3150067-55de-4d28-96a7-256ae2e2a53e req-a799ebf1-5071-450e-981e-037de66f36ac service nova] [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] Refreshing network info cache for port 77aaac28-df93-47dd-a289-96e93d2bc51d {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 925.817582] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-b3150067-55de-4d28-96a7-256ae2e2a53e req-a799ebf1-5071-450e-981e-037de66f36ac service nova] Expecting reply to msg c71657bdb449418cb3420f485cab4fb7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 925.824597] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c71657bdb449418cb3420f485cab4fb7 [ 925.842126] env[62096]: ERROR nova.compute.manager [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 77aaac28-df93-47dd-a289-96e93d2bc51d, please check neutron logs for more information. 
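At this point the same failure mode has hit a second port (77aaac28-df93-47dd-a289-96e93d2bc51d) for a different instance, which suggests looking at the Neutron side rather than at any one instance. One way to confirm what Neutron thinks of a port, sketched with openstacksdk; the cloud name is an assumption, and the attribute names follow the SDK's port resource:

    import openstack

    # Assumes openstacksdk is installed and clouds.yaml defines this cloud.
    conn = openstack.connect(cloud='devstack-admin')
    port = conn.network.get_port('77aaac28-df93-47dd-a289-96e93d2bc51d')

    # A successfully bound port shows a concrete VIF type (e.g. "ovs") and a
    # binding host; a failed binding shows up as "binding_failed".
    print(port.binding_vif_type, port.binding_host_id, port.binding_vif_details)

The same fields are visible from "openstack port show <port-id>"; the authoritative detail about why binding failed is in the neutron-server log for that port ID.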
[ 925.842126] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 925.842126] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 925.842126] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 925.842126] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 925.842126] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 925.842126] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 925.842126] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 925.842126] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 925.842126] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 925.842126] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 925.842126] env[62096]: ERROR nova.compute.manager raise self.value [ 925.842126] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 925.842126] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 925.842126] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 925.842126] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 925.842583] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 925.842583] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 925.842583] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 77aaac28-df93-47dd-a289-96e93d2bc51d, please check neutron logs for more information. 
[ 925.842583] env[62096]: ERROR nova.compute.manager [ 925.842583] env[62096]: Traceback (most recent call last): [ 925.842583] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 925.842583] env[62096]: listener.cb(fileno) [ 925.842583] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 925.842583] env[62096]: result = function(*args, **kwargs) [ 925.842583] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 925.842583] env[62096]: return func(*args, **kwargs) [ 925.842583] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 925.842583] env[62096]: raise e [ 925.842583] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 925.842583] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 925.842583] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 925.842583] env[62096]: created_port_ids = self._update_ports_for_instance( [ 925.842583] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 925.842583] env[62096]: with excutils.save_and_reraise_exception(): [ 925.842583] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 925.842583] env[62096]: self.force_reraise() [ 925.842583] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 925.842583] env[62096]: raise self.value [ 925.842583] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 925.842583] env[62096]: updated_port = self._update_port( [ 925.842583] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 925.842583] env[62096]: _ensure_no_port_binding_failure(port) [ 925.842583] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 925.842583] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 925.843354] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 77aaac28-df93-47dd-a289-96e93d2bc51d, please check neutron logs for more information. [ 925.843354] env[62096]: Removing descriptor: 14 [ 925.922211] env[62096]: DEBUG oslo_concurrency.lockutils [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.713s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 925.922862] env[62096]: ERROR nova.compute.manager [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 843c145b-efae-4f01-a279-a36832055d07, please check neutron logs for more information. 
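The inventory blocks repeated through this excerpt are what the resource tracker reports to placement for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3, and the "compute_resources" claim and abort lock lines (e.g. held 1.713s just above) are the accounting around them. The capacity the scheduler can allocate against follows from those fields as (total - reserved) * allocation_ratio, capped per allocation by max_unit; a small worked check of the numbers in this log:

    # Inventory values as logged for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # -> VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0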
[ 925.922862] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] Traceback (most recent call last): [ 925.922862] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 925.922862] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] self.driver.spawn(context, instance, image_meta, [ 925.922862] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 925.922862] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 925.922862] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 925.922862] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] vm_ref = self.build_virtual_machine(instance, [ 925.922862] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 925.922862] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] vif_infos = vmwarevif.get_vif_info(self._session, [ 925.922862] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 925.923415] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] for vif in network_info: [ 925.923415] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 925.923415] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] return self._sync_wrapper(fn, *args, **kwargs) [ 925.923415] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 925.923415] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] self.wait() [ 925.923415] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 925.923415] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] self[:] = self._gt.wait() [ 925.923415] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 925.923415] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] return self._exit_event.wait() [ 925.923415] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 925.923415] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] current.throw(*self._exc) [ 925.923415] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
925.923415] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] result = function(*args, **kwargs) [ 925.924139] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 925.924139] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] return func(*args, **kwargs) [ 925.924139] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 925.924139] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] raise e [ 925.924139] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 925.924139] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] nwinfo = self.network_api.allocate_for_instance( [ 925.924139] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 925.924139] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] created_port_ids = self._update_ports_for_instance( [ 925.924139] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 925.924139] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] with excutils.save_and_reraise_exception(): [ 925.924139] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 925.924139] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] self.force_reraise() [ 925.924139] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 925.924792] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] raise self.value [ 925.924792] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 925.924792] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] updated_port = self._update_port( [ 925.924792] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 925.924792] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] _ensure_no_port_binding_failure(port) [ 925.924792] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 925.924792] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] raise exception.PortBindingFailed(port_id=port['id']) [ 925.924792] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] nova.exception.PortBindingFailed: Binding failed for 
port 843c145b-efae-4f01-a279-a36832055d07, please check neutron logs for more information. [ 925.924792] env[62096]: ERROR nova.compute.manager [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] [ 925.924792] env[62096]: DEBUG nova.compute.utils [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] Binding failed for port 843c145b-efae-4f01-a279-a36832055d07, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 925.925256] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.770s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 925.926683] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg 065fee0719e3467eba2e8204b66bcf48 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 925.927848] env[62096]: DEBUG nova.compute.manager [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] Build of instance e480be29-d25d-4ff2-8de8-26d6c4078ca9 was re-scheduled: Binding failed for port 843c145b-efae-4f01-a279-a36832055d07, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 925.928286] env[62096]: DEBUG nova.compute.manager [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 925.928731] env[62096]: DEBUG oslo_concurrency.lockutils [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Acquiring lock "refresh_cache-e480be29-d25d-4ff2-8de8-26d6c4078ca9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 925.928731] env[62096]: DEBUG oslo_concurrency.lockutils [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Acquired lock "refresh_cache-e480be29-d25d-4ff2-8de8-26d6c4078ca9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.928868] env[62096]: DEBUG nova.network.neutron [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 925.929108] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Expecting reply to msg 0de05b2df0d04f94b86953e1b4130519 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 925.935424] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0de05b2df0d04f94b86953e1b4130519 [ 925.961001] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 065fee0719e3467eba2e8204b66bcf48 [ 926.235176] env[62096]: DEBUG nova.compute.manager [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 926.260918] env[62096]: DEBUG nova.virt.hardware [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 926.261176] env[62096]: DEBUG nova.virt.hardware [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 926.261332] env[62096]: DEBUG nova.virt.hardware [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 926.261510] env[62096]: DEBUG nova.virt.hardware [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 926.261652] env[62096]: DEBUG nova.virt.hardware [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 926.261793] env[62096]: DEBUG nova.virt.hardware [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 926.262011] env[62096]: DEBUG nova.virt.hardware [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 926.262169] env[62096]: DEBUG nova.virt.hardware [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 926.262333] env[62096]: DEBUG nova.virt.hardware [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] 
Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 926.262495] env[62096]: DEBUG nova.virt.hardware [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 926.262667] env[62096]: DEBUG nova.virt.hardware [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 926.263535] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69789151-49b3-4737-ad1c-5aedbd5aec2a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.271473] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10a30bc8-3583-412d-b472-43b52fa0f29c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.284980] env[62096]: ERROR nova.compute.manager [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 77aaac28-df93-47dd-a289-96e93d2bc51d, please check neutron logs for more information. [ 926.284980] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] Traceback (most recent call last): [ 926.284980] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 926.284980] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] yield resources [ 926.284980] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 926.284980] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] self.driver.spawn(context, instance, image_meta, [ 926.284980] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 926.284980] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 926.284980] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 926.284980] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] vm_ref = self.build_virtual_machine(instance, [ 926.284980] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 926.285431] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] vif_infos = vmwarevif.get_vif_info(self._session, [ 926.285431] env[62096]: ERROR 
nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 926.285431] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] for vif in network_info: [ 926.285431] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 926.285431] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] return self._sync_wrapper(fn, *args, **kwargs) [ 926.285431] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 926.285431] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] self.wait() [ 926.285431] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 926.285431] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] self[:] = self._gt.wait() [ 926.285431] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 926.285431] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] return self._exit_event.wait() [ 926.285431] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 926.285431] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] current.throw(*self._exc) [ 926.285843] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 926.285843] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] result = function(*args, **kwargs) [ 926.285843] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 926.285843] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] return func(*args, **kwargs) [ 926.285843] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 926.285843] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] raise e [ 926.285843] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 926.285843] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] nwinfo = self.network_api.allocate_for_instance( [ 926.285843] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 926.285843] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] created_port_ids = self._update_ports_for_instance( [ 926.285843] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] 
File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 926.285843] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] with excutils.save_and_reraise_exception(): [ 926.285843] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 926.286258] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] self.force_reraise() [ 926.286258] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 926.286258] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] raise self.value [ 926.286258] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 926.286258] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] updated_port = self._update_port( [ 926.286258] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 926.286258] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] _ensure_no_port_binding_failure(port) [ 926.286258] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 926.286258] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] raise exception.PortBindingFailed(port_id=port['id']) [ 926.286258] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] nova.exception.PortBindingFailed: Binding failed for port 77aaac28-df93-47dd-a289-96e93d2bc51d, please check neutron logs for more information. [ 926.286258] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] [ 926.286258] env[62096]: INFO nova.compute.manager [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] Terminating instance [ 926.287539] env[62096]: DEBUG oslo_concurrency.lockutils [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Acquiring lock "refresh_cache-2d42e5f7-b293-4133-b279-63a8320bc19d" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 926.336098] env[62096]: DEBUG nova.network.neutron [req-b3150067-55de-4d28-96a7-256ae2e2a53e req-a799ebf1-5071-450e-981e-037de66f36ac service nova] [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 926.436507] env[62096]: DEBUG nova.network.neutron [req-b3150067-55de-4d28-96a7-256ae2e2a53e req-a799ebf1-5071-450e-981e-037de66f36ac service nova] [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 926.436987] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-b3150067-55de-4d28-96a7-256ae2e2a53e req-a799ebf1-5071-450e-981e-037de66f36ac service nova] Expecting reply to msg dc3d9cf1dbe64d9b8897333e6975631c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 926.446248] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dc3d9cf1dbe64d9b8897333e6975631c [ 926.454556] env[62096]: DEBUG nova.network.neutron [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 926.565065] env[62096]: DEBUG nova.network.neutron [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 926.565597] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Expecting reply to msg 635f10c28979414db4d9416dacef7cbc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 926.573254] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-493b6d3d-5b8c-466f-a902-7c929c5015bf {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.576042] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 635f10c28979414db4d9416dacef7cbc [ 926.581347] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef90388e-ef72-430d-8b1c-af95aa6eb9fe {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.613669] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18fd5c3b-3558-4ad6-b787-020921a70db0 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.621382] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-466b122e-157a-49c4-a2bc-7c5a54b6630b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.635337] env[62096]: DEBUG nova.compute.provider_tree [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 926.635842] env[62096]: INFO 
oslo_messaging._drivers.amqpdriver [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg e71b97bfd14a48f5a4586593bd1cfb99 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 926.642629] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e71b97bfd14a48f5a4586593bd1cfb99 [ 926.939486] env[62096]: DEBUG oslo_concurrency.lockutils [req-b3150067-55de-4d28-96a7-256ae2e2a53e req-a799ebf1-5071-450e-981e-037de66f36ac service nova] Releasing lock "refresh_cache-2d42e5f7-b293-4133-b279-63a8320bc19d" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 926.939901] env[62096]: DEBUG oslo_concurrency.lockutils [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Acquired lock "refresh_cache-2d42e5f7-b293-4133-b279-63a8320bc19d" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 926.940105] env[62096]: DEBUG nova.network.neutron [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 926.940546] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 27b2f6678f3c42afbc315aa59379b322 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 926.947734] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 27b2f6678f3c42afbc315aa59379b322 [ 927.068090] env[62096]: DEBUG oslo_concurrency.lockutils [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Releasing lock "refresh_cache-e480be29-d25d-4ff2-8de8-26d6c4078ca9" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 927.068338] env[62096]: DEBUG nova.compute.manager [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 927.068526] env[62096]: DEBUG nova.compute.manager [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 927.068691] env[62096]: DEBUG nova.network.neutron [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 927.082168] env[62096]: DEBUG nova.network.neutron [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 927.082691] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Expecting reply to msg 4fdfabf1b2234522862590fef7d86fc1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 927.090577] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4fdfabf1b2234522862590fef7d86fc1 [ 927.138844] env[62096]: DEBUG nova.scheduler.client.report [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 927.141223] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg aebf10eebe054dc1b91b00489287e719 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 927.151930] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aebf10eebe054dc1b91b00489287e719 [ 927.470808] env[62096]: DEBUG nova.network.neutron [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 927.560548] env[62096]: DEBUG nova.network.neutron [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 927.561070] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg bafbe762aad946abbb9da4d741c603f1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 927.569617] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bafbe762aad946abbb9da4d741c603f1 [ 927.584416] env[62096]: DEBUG nova.network.neutron [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 927.584899] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Expecting reply to msg c37087b39d664342a40c45f389e9ac78 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 927.603710] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c37087b39d664342a40c45f389e9ac78 [ 927.643601] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.719s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 927.644282] env[62096]: ERROR nova.compute.manager [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port d3b0c730-8282-4ddc-8872-0178f3ddc2e6, please check neutron logs for more information. 
[ 927.644282] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] Traceback (most recent call last): [ 927.644282] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 927.644282] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] self.driver.spawn(context, instance, image_meta, [ 927.644282] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 927.644282] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 927.644282] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 927.644282] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] vm_ref = self.build_virtual_machine(instance, [ 927.644282] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 927.644282] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] vif_infos = vmwarevif.get_vif_info(self._session, [ 927.644282] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 927.644787] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] for vif in network_info: [ 927.644787] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 927.644787] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] return self._sync_wrapper(fn, *args, **kwargs) [ 927.644787] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 927.644787] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] self.wait() [ 927.644787] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 927.644787] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] self[:] = self._gt.wait() [ 927.644787] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 927.644787] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] return self._exit_event.wait() [ 927.644787] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 927.644787] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] current.throw(*self._exc) [ 927.644787] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
927.644787] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] result = function(*args, **kwargs) [ 927.645373] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 927.645373] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] return func(*args, **kwargs) [ 927.645373] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 927.645373] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] raise e [ 927.645373] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 927.645373] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] nwinfo = self.network_api.allocate_for_instance( [ 927.645373] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 927.645373] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] created_port_ids = self._update_ports_for_instance( [ 927.645373] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 927.645373] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] with excutils.save_and_reraise_exception(): [ 927.645373] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 927.645373] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] self.force_reraise() [ 927.645373] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 927.645902] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] raise self.value [ 927.645902] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 927.645902] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] updated_port = self._update_port( [ 927.645902] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 927.645902] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] _ensure_no_port_binding_failure(port) [ 927.645902] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 927.645902] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] raise exception.PortBindingFailed(port_id=port['id']) [ 927.645902] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] nova.exception.PortBindingFailed: Binding failed for 
port d3b0c730-8282-4ddc-8872-0178f3ddc2e6, please check neutron logs for more information. [ 927.645902] env[62096]: ERROR nova.compute.manager [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] [ 927.645902] env[62096]: DEBUG nova.compute.utils [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] Binding failed for port d3b0c730-8282-4ddc-8872-0178f3ddc2e6, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 927.646511] env[62096]: DEBUG nova.compute.manager [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] Build of instance 1c02c726-ab23-49e5-8d55-b222c712225a was re-scheduled: Binding failed for port d3b0c730-8282-4ddc-8872-0178f3ddc2e6, please check neutron logs for more information. {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 927.646915] env[62096]: DEBUG nova.compute.manager [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 927.647133] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Acquiring lock "refresh_cache-1c02c726-ab23-49e5-8d55-b222c712225a" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 927.647280] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Acquired lock "refresh_cache-1c02c726-ab23-49e5-8d55-b222c712225a" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.647434] env[62096]: DEBUG nova.network.neutron [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 927.647822] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg f157cfc5eb5a46eaaf3da5470e5d7d33 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 927.648928] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.240s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 927.650320] env[62096]: INFO nova.compute.claims [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 
tempest-ServerShowV247Test-791895041-project-member] [instance: dc451ce2-65de-4497-ad48-fd776f73cb80] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 927.652183] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg e0061579a9c84f59a9016ba8f573aa52 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 927.654440] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f157cfc5eb5a46eaaf3da5470e5d7d33 [ 927.688683] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e0061579a9c84f59a9016ba8f573aa52 [ 927.843712] env[62096]: DEBUG nova.compute.manager [req-9ba01703-b1c1-4a41-adf3-f803201bc06e req-131d6fee-a561-4a08-bbf8-b156b03d567d service nova] [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] Received event network-vif-deleted-77aaac28-df93-47dd-a289-96e93d2bc51d {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 928.063370] env[62096]: DEBUG oslo_concurrency.lockutils [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Releasing lock "refresh_cache-2d42e5f7-b293-4133-b279-63a8320bc19d" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 928.063812] env[62096]: DEBUG nova.compute.manager [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 928.064083] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 928.064414] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-47375ea2-ff9e-479a-9743-7101aca37ab1 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.073616] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71503236-8ea9-4ce6-9aee-446e0e1a32ec {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.089059] env[62096]: INFO nova.compute.manager [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] [instance: e480be29-d25d-4ff2-8de8-26d6c4078ca9] Took 1.02 seconds to deallocate network for instance. 
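The paired 'Lock "compute_resources" acquired by "...instance_claim" :: waited 16.240s' and '"released" by ... :: held' messages in this stretch come from oslo.concurrency's lockutils wrapper (lockutils.py:402/407/421 in the entries above). Below is a minimal sketch of that mechanism, assuming only that oslo.concurrency is installed; claim_resources is a hypothetical stand-in for ResourceTracker.instance_claim, not Nova code.

import logging
import threading
import time

from oslo_concurrency import lockutils

# DEBUG logging makes lockutils emit the same "acquired by ... waited" and
# '"released" by ... held' messages seen in the log above.
logging.basicConfig(level=logging.DEBUG)


@lockutils.synchronized('compute_resources')
def claim_resources(instance_id):
    # Hypothetical stand-in for ResourceTracker.instance_claim(): all updates
    # to the tracked resource totals are serialized on one named lock.
    time.sleep(0.1)
    return instance_id


if __name__ == "__main__":
    threads = [threading.Thread(target=claim_resources, args=("inst-%d" % i,))
               for i in range(3)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()

Each call logs how long it waited to acquire the named lock and how long it held it; the multi-second "waited" values above are simply callers (aborted claims and new claims) queuing behind one another on that same in-process lock.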
[ 928.090590] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Expecting reply to msg 9f114469144e45d9bcf03cc533979404 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 928.095958] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2d42e5f7-b293-4133-b279-63a8320bc19d could not be found. [ 928.096199] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 928.096392] env[62096]: INFO nova.compute.manager [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] Took 0.03 seconds to destroy the instance on the hypervisor. [ 928.096628] env[62096]: DEBUG oslo.service.loopingcall [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 928.096841] env[62096]: DEBUG nova.compute.manager [-] [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 928.096935] env[62096]: DEBUG nova.network.neutron [-] [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 928.110711] env[62096]: DEBUG nova.network.neutron [-] [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 928.111254] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 7a84b4a209064a6581f13d682393914e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 928.118545] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7a84b4a209064a6581f13d682393914e [ 928.140771] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f114469144e45d9bcf03cc533979404 [ 928.155606] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg 69af4ad1a30f4e85acbc9166e8609de8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 928.174970] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 69af4ad1a30f4e85acbc9166e8609de8 [ 928.310346] env[62096]: DEBUG nova.network.neutron [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 928.397118] env[62096]: DEBUG nova.network.neutron [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.397648] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg b4908f31cb5f4655b0791fa6c6f21b97 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 928.405921] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4908f31cb5f4655b0791fa6c6f21b97 [ 928.594909] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Expecting reply to msg a006045e36fc4fb19b20878b676bc019 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 928.613680] env[62096]: DEBUG nova.network.neutron [-] [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.614160] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg bb82ac3f971d4192a2c82a43903ddefc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 928.623543] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bb82ac3f971d4192a2c82a43903ddefc [ 928.639746] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a006045e36fc4fb19b20878b676bc019 [ 928.798400] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b3effb8-f388-46d0-b158-11077f624d32 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.806203] env[62096]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17bd45fc-516c-40ba-b015-18e1d411e110 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.850483] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61b53c57-ea75-4048-8a3a-f3f3fef3e796 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.858684] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-447805d1-b2f8-4775-af34-96eb61d1b062 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.871578] env[62096]: DEBUG nova.compute.provider_tree [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 928.872407] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg c40e52098fbf4fe58e1636533af35fe4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 928.899847] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Releasing lock "refresh_cache-1c02c726-ab23-49e5-8d55-b222c712225a" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 928.900096] env[62096]: DEBUG nova.compute.manager [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 928.900290] env[62096]: DEBUG nova.compute.manager [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 928.900458] env[62096]: DEBUG nova.network.neutron [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 928.910139] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c40e52098fbf4fe58e1636533af35fe4 [ 928.916467] env[62096]: DEBUG nova.network.neutron [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 928.917000] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg b259854698cd4f2c8aa840d6bc6c4ce9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 928.922841] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b259854698cd4f2c8aa840d6bc6c4ce9 [ 929.117153] env[62096]: INFO nova.scheduler.client.report [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Deleted allocations for instance e480be29-d25d-4ff2-8de8-26d6c4078ca9 [ 929.123496] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Expecting reply to msg c998b3549baa446687c49c677a0154a0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 929.124979] env[62096]: INFO nova.compute.manager [-] [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] Took 1.03 seconds to deallocate network for instance. [ 929.127116] env[62096]: DEBUG nova.compute.claims [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 929.127217] env[62096]: DEBUG oslo_concurrency.lockutils [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.138475] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c998b3549baa446687c49c677a0154a0 [ 929.376249] env[62096]: DEBUG nova.scheduler.client.report [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 929.379670] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg 9eae2bfc1c004967bd7bc5964678994b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 929.392745] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9eae2bfc1c004967bd7bc5964678994b [ 929.419848] env[62096]: DEBUG nova.network.neutron [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 
1c02c726-ab23-49e5-8d55-b222c712225a] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.420367] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg 81a8b3d70631478aab3511fb3475f2e0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 929.427866] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 81a8b3d70631478aab3511fb3475f2e0 [ 929.626102] env[62096]: DEBUG oslo_concurrency.lockutils [None req-423ed1ce-80ce-46c1-a5f7-b083eaadd697 tempest-ServerTagsTestJSON-1314597396 tempest-ServerTagsTestJSON-1314597396-project-member] Lock "e480be29-d25d-4ff2-8de8-26d6c4078ca9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 89.294s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.882933] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.234s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.883455] env[62096]: DEBUG nova.compute.manager [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: dc451ce2-65de-4497-ad48-fd776f73cb80] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 929.885216] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg 0d5b519931044d16ae63763b9195455b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 929.886224] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.737s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 929.887598] env[62096]: INFO nova.compute.claims [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 929.889203] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg 1d83812ed0914e0c903e066ad1ecf878 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 929.914503] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0d5b519931044d16ae63763b9195455b [ 929.921938] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1d83812ed0914e0c903e066ad1ecf878 [ 929.922886] env[62096]: INFO nova.compute.manager [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] [instance: 1c02c726-ab23-49e5-8d55-b222c712225a] Took 1.02 seconds to deallocate network for instance. 
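The paired 'Acquiring lock "compute_resources"' / 'Lock "compute_resources" ... held 2.234s' entries above are emitted by oslo.concurrency's lockutils wrapper around the resource tracker's claim and abort paths; the waited/held timings show how long each request queued behind the per-host lock. A minimal sketch of that locking pattern follows, assuming only that oslo.concurrency is installed; the lock name matches the log, but the function body and sample arguments are illustrative placeholders, not Nova's actual claim logic.

# Minimal sketch of the lockutils pattern behind the lock lines above.
# Only the lock name is taken from the log; everything else is a placeholder.
from oslo_concurrency import lockutils

COMPUTE_RESOURCE_SEMAPHORE = "compute_resources"

@lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
def claim_resources(instance_uuid, memory_mb):
    # While the lock is held, no other claim/abort can touch this process's
    # view of the host; the DEBUG lines record how long each caller waited
    # for the lock and how long it was held.
    print("claiming %d MB for instance %s" % (memory_mb, instance_uuid))

claim_resources("dc451ce2-65de-4497-ad48-fd776f73cb80", 192)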
[ 929.924625] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg fc2cb091c10f4e55b1943f952ed891d3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 929.952952] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fc2cb091c10f4e55b1943f952ed891d3 [ 930.392446] env[62096]: DEBUG nova.compute.utils [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 930.392868] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg 29a36b2cd116445ca569f83a8c0fbf2b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 930.395127] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg e1b297564f784d75b3b04a579d19bbcb in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 930.396219] env[62096]: DEBUG nova.compute.manager [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: dc451ce2-65de-4497-ad48-fd776f73cb80] Not allocating networking since 'none' was specified. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 930.403471] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e1b297564f784d75b3b04a579d19bbcb [ 930.404043] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 29a36b2cd116445ca569f83a8c0fbf2b [ 930.429147] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg ae86f89fa197444cbab3a05114cc1e53 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 930.463456] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ae86f89fa197444cbab3a05114cc1e53 [ 930.897695] env[62096]: DEBUG nova.compute.manager [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: dc451ce2-65de-4497-ad48-fd776f73cb80] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 930.900120] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg 92748eefbcfb43ccbf8393b43022e65a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 930.930884] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 92748eefbcfb43ccbf8393b43022e65a [ 930.961376] env[62096]: INFO nova.scheduler.client.report [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Deleted allocations for instance 1c02c726-ab23-49e5-8d55-b222c712225a [ 930.968049] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Expecting reply to msg 655d601e85bf43d6874dd6695d91bc17 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 930.978143] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 655d601e85bf43d6874dd6695d91bc17 [ 931.028942] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d46ba89-dc79-4719-9973-1f82b140ca0e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.037037] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d8dd3b8-4108-42ea-85fb-0466946b92bc {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.068649] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63f9ef8a-3d01-406a-8258-540ea0aa629b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.076399] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-741cc983-826b-46a3-a95c-e973cf3640ca {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.092094] env[62096]: DEBUG nova.compute.provider_tree [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 931.092776] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg 14ecfa647b1240c0bf04840e3bac391f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 931.101336] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 14ecfa647b1240c0bf04840e3bac391f [ 931.407129] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg 2e61a4de40af45c2b1d8f3e5a0330db0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 931.439129] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for 
msg 2e61a4de40af45c2b1d8f3e5a0330db0 [ 931.471110] env[62096]: DEBUG oslo_concurrency.lockutils [None req-b46ae41d-7569-429f-b0df-d60640b27629 tempest-AttachInterfacesTestJSON-773831744 tempest-AttachInterfacesTestJSON-773831744-project-member] Lock "1c02c726-ab23-49e5-8d55-b222c712225a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.252s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 931.596082] env[62096]: DEBUG nova.scheduler.client.report [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 931.598570] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg 133f3dba306b4cf2b2b02d42b22548fc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 931.609438] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 133f3dba306b4cf2b2b02d42b22548fc [ 931.911422] env[62096]: DEBUG nova.compute.manager [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: dc451ce2-65de-4497-ad48-fd776f73cb80] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 931.935719] env[62096]: DEBUG nova.virt.hardware [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 931.935948] env[62096]: DEBUG nova.virt.hardware [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 931.936117] env[62096]: DEBUG nova.virt.hardware [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 931.936302] env[62096]: DEBUG nova.virt.hardware [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 931.936444] env[62096]: DEBUG nova.virt.hardware [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 931.936587] env[62096]: DEBUG nova.virt.hardware [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 931.936794] env[62096]: DEBUG nova.virt.hardware [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 931.936949] env[62096]: DEBUG nova.virt.hardware [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 931.937122] env[62096]: DEBUG nova.virt.hardware [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 
tempest-ServerShowV247Test-791895041-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 931.937289] env[62096]: DEBUG nova.virt.hardware [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 931.937457] env[62096]: DEBUG nova.virt.hardware [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 931.938314] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0d12f97-f3a4-4261-9176-13dbad239b1e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.946535] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dae431d7-eddb-47f3-b9bb-2923a26386b8 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.960699] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: dc451ce2-65de-4497-ad48-fd776f73cb80] Instance VIF info [] {{(pid=62096) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 931.966199] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Creating folder: Project (9ecfe77150fc42e68a40fbcc538c0cd3). Parent ref: group-v107847. {{(pid=62096) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 931.966464] env[62096]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e0cc5bee-4d9a-433d-a265-24d211ec5567 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.975833] env[62096]: INFO nova.virt.vmwareapi.vm_util [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Created folder: Project (9ecfe77150fc42e68a40fbcc538c0cd3) in parent group-v107847. [ 931.976030] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Creating folder: Instances. Parent ref: group-v107875. {{(pid=62096) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 931.976255] env[62096]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2a73e5f8-178a-4ffc-b208-07f58ce9fbd0 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.985072] env[62096]: INFO nova.virt.vmwareapi.vm_util [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Created folder: Instances in parent group-v107875. 
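The "Invoking Folder.CreateFolder ..." entries above, and the "Invoking Folder.CreateVM_Task ..." entry that follows, reflect oslo.vmware's usual call pattern: the session issues the SOAP method through invoke_api(), and asynchronous methods return a Task that wait_for_task() polls (hence the "progress is 0%" / "completed successfully" lines). A sketch of that pattern is below; it needs a reachable vCenter, and the host, credentials and the *_ref/config-spec variables are placeholders, not values taken from this log.

# Illustrative oslo.vmware usage; placeholder values are marked as such.
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    "vcenter.example.org", "user", "secret",
    api_retry_count=3, task_poll_interval=0.5)

parent_folder_ref = ...    # project Folder moref, obtained elsewhere
vm_config_spec = ...       # VirtualMachineConfigSpec built from flavor/image
resource_pool_ref = ...    # target ResourcePool moref

# Synchronous call: the new folder reference comes straight back.
instances_folder = session.invoke_api(
    session.vim, "CreateFolder", parent_folder_ref, name="Instances")

# Asynchronous call: CreateVM_Task returns a Task moref that is polled
# until it finishes, which is what produces the task-progress lines.
task = session.invoke_api(
    session.vim, "CreateVM_Task", instances_folder,
    config=vm_config_spec, pool=resource_pool_ref)
task_info = session.wait_for_task(task)
print(task_info.result)    # moref of the newly created VM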
[ 931.985458] env[62096]: DEBUG oslo.service.loopingcall [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 931.985657] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dc451ce2-65de-4497-ad48-fd776f73cb80] Creating VM on the ESX host {{(pid=62096) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 931.985849] env[62096]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-952a24f6-e6a8-49f8-b3bd-741ac51db1ac {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.005821] env[62096]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 932.005821] env[62096]: value = "task-397462" [ 932.005821] env[62096]: _type = "Task" [ 932.005821] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.016304] env[62096]: DEBUG oslo_vmware.api [-] Task: {'id': task-397462, 'name': CreateVM_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.101422] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.215s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.101988] env[62096]: DEBUG nova.compute.manager [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 932.103801] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg 2ab1b07db6324d69941aff9285598982 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 932.104932] env[62096]: DEBUG oslo_concurrency.lockutils [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.208s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 932.106444] env[62096]: INFO nova.compute.claims [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 932.107984] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 015d94610f3e4a39aeeb96a98ff68580 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 932.148102] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2ab1b07db6324d69941aff9285598982 [ 932.148603] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 015d94610f3e4a39aeeb96a98ff68580 [ 932.515972] env[62096]: DEBUG oslo_vmware.api [-] Task: {'id': task-397462, 'name': CreateVM_Task, 'duration_secs': 0.240817} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.516190] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dc451ce2-65de-4497-ad48-fd776f73cb80] Created VM on the ESX host {{(pid=62096) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 932.516623] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 932.516778] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Acquired lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.517160] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 932.517412] env[62096]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55aa4d4e-92d0-4158-803d-f365e9503599 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.521738] env[62096]: DEBUG oslo_vmware.api [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Waiting for the task: (returnval){ [ 932.521738] env[62096]: value = "session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]52353fab-afa3-572e-9635-d607118e8dfa" [ 932.521738] env[62096]: _type = "Task" [ 932.521738] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.529240] env[62096]: DEBUG oslo_vmware.api [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]52353fab-afa3-572e-9635-d607118e8dfa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.611077] env[62096]: DEBUG nova.compute.utils [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 932.611850] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg 07888ef28b3d490695a575b21ff18314 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 932.616955] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg a281a162592c4a5a92ca5b7216f7f4ea in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 932.616955] env[62096]: DEBUG nova.compute.manager [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Not allocating networking since 'none' was specified. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 932.622351] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a281a162592c4a5a92ca5b7216f7f4ea [ 932.622836] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 07888ef28b3d490695a575b21ff18314 [ 933.032279] env[62096]: DEBUG oslo_vmware.api [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]52353fab-afa3-572e-9635-d607118e8dfa, 'name': SearchDatastore_Task, 'duration_secs': 0.008621} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.032769] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Releasing lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 933.032849] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: dc451ce2-65de-4497-ad48-fd776f73cb80] Processing image fb764baa-3805-45c4-a694-aa91b0932110 {{(pid=62096) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 933.033043] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 933.033161] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Acquired lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.033304] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62096) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 933.033592] env[62096]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-352219f6-5536-455b-8399-9417b09793de {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.041341] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62096) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 933.041437] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62096) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 933.042301] env[62096]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-051fd2c1-30d4-483e-8701-ddde8203d97e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.047560] env[62096]: DEBUG oslo_vmware.api [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Waiting for the task: (returnval){ [ 933.047560] env[62096]: value = "session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]521ba924-b7b2-6e8b-acb2-3710e44d4664" [ 933.047560] env[62096]: _type = "Task" [ 933.047560] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.055683] env[62096]: DEBUG oslo_vmware.api [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]521ba924-b7b2-6e8b-acb2-3710e44d4664, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.116557] env[62096]: DEBUG nova.compute.manager [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Start building block device mappings for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 933.118278] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg f13999b8c6ae40b690f9a88b2f1790a5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 933.154930] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f13999b8c6ae40b690f9a88b2f1790a5 [ 933.228492] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-501340b5-77d3-420a-bb69-33da60b88e7e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.235934] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03b7b79d-8390-4014-b887-e30256b576fc {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.271108] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-457bd1eb-1282-4fb8-933d-8470e59cd35c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.278119] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-450f2133-5e33-4a4d-9d23-b5b3225fdea6 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.291513] env[62096]: DEBUG nova.compute.provider_tree [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 933.292035] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 6d6d18c5aae24a32a91aaf1784077570 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 933.299723] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6d6d18c5aae24a32a91aaf1784077570 [ 933.561133] env[62096]: DEBUG oslo_vmware.api [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]521ba924-b7b2-6e8b-acb2-3710e44d4664, 'name': SearchDatastore_Task, 'duration_secs': 0.007632} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.562186] env[62096]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03d620c5-0281-41bb-8a32-56f160748590 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.567419] env[62096]: DEBUG oslo_vmware.api [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Waiting for the task: (returnval){ [ 933.567419] env[62096]: value = "session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]5235f623-e5d6-262f-4acd-a7903bfb04ff" [ 933.567419] env[62096]: _type = "Task" [ 933.567419] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.578647] env[62096]: DEBUG oslo_vmware.api [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]5235f623-e5d6-262f-4acd-a7903bfb04ff, 'name': SearchDatastore_Task, 'duration_secs': 0.00789} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.578881] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Releasing lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 933.579128] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk to [datastore2] dc451ce2-65de-4497-ad48-fd776f73cb80/dc451ce2-65de-4497-ad48-fd776f73cb80.vmdk {{(pid=62096) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 933.579366] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f956e1c8-2c25-45da-b7db-567662523a8e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.585270] env[62096]: DEBUG oslo_vmware.api [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Waiting for the task: (returnval){ [ 933.585270] env[62096]: value = "task-397463" [ 933.585270] env[62096]: _type = "Task" [ 933.585270] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.592846] env[62096]: DEBUG oslo_vmware.api [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397463, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.625887] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg bbe87b0bbb7c48f1a077d073edd00bc7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 933.657540] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bbe87b0bbb7c48f1a077d073edd00bc7 [ 933.805772] env[62096]: DEBUG nova.scheduler.client.report [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 933.805772] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 4a2e223ea314400ebb36b306119f17d4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 933.818014] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4a2e223ea314400ebb36b306119f17d4 [ 934.096097] env[62096]: DEBUG oslo_vmware.api [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397463, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.448327} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.096757] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk to [datastore2] dc451ce2-65de-4497-ad48-fd776f73cb80/dc451ce2-65de-4497-ad48-fd776f73cb80.vmdk {{(pid=62096) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 934.097183] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: dc451ce2-65de-4497-ad48-fd776f73cb80] Extending root virtual disk to 1048576 {{(pid=62096) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 934.097595] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-03321c5d-26f8-41c4-a0e9-e5c5d05870b1 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.106203] env[62096]: DEBUG oslo_vmware.api [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Waiting for the task: (returnval){ [ 934.106203] env[62096]: value = "task-397464" [ 934.106203] env[62096]: _type = "Task" [ 934.106203] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.115039] env[62096]: DEBUG oslo_vmware.api [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397464, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.130516] env[62096]: DEBUG nova.compute.manager [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 934.157575] env[62096]: DEBUG nova.virt.hardware [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 934.158159] env[62096]: DEBUG nova.virt.hardware [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 934.158486] env[62096]: DEBUG nova.virt.hardware [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 934.158834] env[62096]: DEBUG nova.virt.hardware [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 934.159291] env[62096]: DEBUG nova.virt.hardware [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 934.159855] env[62096]: DEBUG nova.virt.hardware [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 934.160360] env[62096]: DEBUG nova.virt.hardware [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 934.161206] env[62096]: DEBUG nova.virt.hardware [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 934.161654] env[62096]: DEBUG nova.virt.hardware [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 
tempest-ServerShowV247Test-791895041-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 934.162112] env[62096]: DEBUG nova.virt.hardware [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 934.162464] env[62096]: DEBUG nova.virt.hardware [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 934.163635] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46139355-49f8-492e-b753-cc08dfc728d9 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.176363] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-763e26e3-c451-4de7-acf4-016c55faa047 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.189482] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Instance VIF info [] {{(pid=62096) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 934.194980] env[62096]: DEBUG oslo.service.loopingcall [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 934.195281] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Creating VM on the ESX host {{(pid=62096) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 934.195481] env[62096]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-55533695-24fb-4a98-918c-60e1b98f109d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.212825] env[62096]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 934.212825] env[62096]: value = "task-397465" [ 934.212825] env[62096]: _type = "Task" [ 934.212825] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.220985] env[62096]: DEBUG oslo_vmware.api [-] Task: {'id': task-397465, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.307152] env[62096]: DEBUG oslo_concurrency.lockutils [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.202s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.307752] env[62096]: DEBUG nova.compute.manager [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] Start building networks asynchronously for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 934.309428] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 3e96c8eca70840eb96471d4ed9f60a30 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 934.310590] env[62096]: DEBUG oslo_concurrency.lockutils [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.083s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 934.312491] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg ee0bae02306c4b78a1bf0212b82ff954 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 934.344461] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e96c8eca70840eb96471d4ed9f60a30 [ 934.345366] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ee0bae02306c4b78a1bf0212b82ff954 [ 934.616036] env[62096]: DEBUG oslo_vmware.api [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397464, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.052494} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.616308] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: dc451ce2-65de-4497-ad48-fd776f73cb80] Extended root virtual disk {{(pid=62096) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 934.617096] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-425a8d4a-410c-4920-bf56-f27e655f8bbb {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.635940] env[62096]: DEBUG nova.virt.vmwareapi.volumeops [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: dc451ce2-65de-4497-ad48-fd776f73cb80] Reconfiguring VM instance instance-00000054 to attach disk [datastore2] dc451ce2-65de-4497-ad48-fd776f73cb80/dc451ce2-65de-4497-ad48-fd776f73cb80.vmdk or device None with type sparse {{(pid=62096) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 934.636260] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7c3a74b4-2c7a-4c91-9d79-43f3d34afc27 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.655531] env[62096]: DEBUG oslo_vmware.api [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Waiting for the task: (returnval){ [ 934.655531] env[62096]: value = "task-397466" [ 934.655531] env[62096]: _type = "Task" [ 934.655531] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.662638] env[62096]: DEBUG oslo_vmware.api [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397466, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.725805] env[62096]: DEBUG oslo_vmware.api [-] Task: {'id': task-397465, 'name': CreateVM_Task, 'duration_secs': 0.290207} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.726016] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Created VM on the ESX host {{(pid=62096) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 934.726464] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 934.726675] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Acquired lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 934.727026] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 934.727316] env[62096]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c2e826e-40b4-48a7-a89e-5adc772878a3 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.738431] env[62096]: DEBUG oslo_vmware.api [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Waiting for the task: (returnval){ [ 934.738431] env[62096]: value = "session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]52b2bd6b-f9b1-552d-bebe-52df4473acf2" [ 934.738431] env[62096]: _type = "Task" [ 934.738431] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.746323] env[62096]: DEBUG oslo_vmware.api [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]52b2bd6b-f9b1-552d-bebe-52df4473acf2, 'name': SearchDatastore_Task, 'duration_secs': 0.007826} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.746647] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Releasing lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 934.746910] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Processing image fb764baa-3805-45c4-a694-aa91b0932110 {{(pid=62096) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 934.747214] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 934.747355] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Acquired lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 934.747572] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62096) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 934.747848] env[62096]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8cf4a54a-ed14-44fd-8b15-a749153a649b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.754202] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62096) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 934.754468] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62096) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 934.755210] env[62096]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16737908-6263-4585-8ab8-8302cb5574db {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.759643] env[62096]: DEBUG oslo_vmware.api [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Waiting for the task: (returnval){ [ 934.759643] env[62096]: value = "session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]52382610-719c-c563-81e5-90f482ce8c34" [ 934.759643] env[62096]: _type = "Task" [ 934.759643] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.766589] env[62096]: DEBUG oslo_vmware.api [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]52382610-719c-c563-81e5-90f482ce8c34, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.815128] env[62096]: DEBUG nova.compute.utils [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 934.815792] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 0b1122b33b9742489e6334c3d4b8e8b5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 934.819768] env[62096]: DEBUG nova.compute.manager [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 934.819979] env[62096]: DEBUG nova.network.neutron [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 934.847992] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0b1122b33b9742489e6334c3d4b8e8b5 [ 934.870159] env[62096]: DEBUG nova.policy [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6d00589b1aa24dd7beb7c3ac5cb2a8ca', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bffafc9dc83d477d823cd7364968f48a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 934.922874] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-076b81e4-e967-49b9-98a9-f80b316ce337 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.930084] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-835adfc2-9a0f-4ece-abdb-37792177369b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.964924] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-448254ca-9be1-4e2e-9995-db6dbe614159 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.972807] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b306291-d6f5-4dbc-b06a-8c9c79ce4551 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.988480] env[62096]: DEBUG nova.compute.provider_tree [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 934.989534] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg 7a0f5cbd239f4d3c9bb76c01bf33bbe6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 934.998182] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received 
RPC response for msg 7a0f5cbd239f4d3c9bb76c01bf33bbe6 [ 935.152989] env[62096]: DEBUG nova.network.neutron [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] Successfully created port: 1d4e854d-9749-4e8b-9a8a-eb9a812ee623 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 935.173311] env[62096]: DEBUG oslo_vmware.api [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397466, 'name': ReconfigVM_Task, 'duration_secs': 0.252735} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.173662] env[62096]: DEBUG nova.virt.vmwareapi.volumeops [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: dc451ce2-65de-4497-ad48-fd776f73cb80] Reconfigured VM instance instance-00000054 to attach disk [datastore2] dc451ce2-65de-4497-ad48-fd776f73cb80/dc451ce2-65de-4497-ad48-fd776f73cb80.vmdk or device None with type sparse {{(pid=62096) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 935.174354] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-abf3cc2c-e69d-41ed-a252-cd10b14a9072 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.181126] env[62096]: DEBUG oslo_vmware.api [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Waiting for the task: (returnval){ [ 935.181126] env[62096]: value = "task-397467" [ 935.181126] env[62096]: _type = "Task" [ 935.181126] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.190645] env[62096]: DEBUG oslo_vmware.api [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397467, 'name': Rename_Task} progress is 5%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.270003] env[62096]: DEBUG oslo_vmware.api [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]52382610-719c-c563-81e5-90f482ce8c34, 'name': SearchDatastore_Task, 'duration_secs': 0.00744} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.270827] env[62096]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b2c2fc7-4a7a-4268-b0cd-1e7c4169de31 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.275915] env[62096]: DEBUG oslo_vmware.api [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Waiting for the task: (returnval){ [ 935.275915] env[62096]: value = "session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]52e117a2-7119-fb10-1c7c-983809ebd4a6" [ 935.275915] env[62096]: _type = "Task" [ 935.275915] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.283626] env[62096]: DEBUG oslo_vmware.api [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]52e117a2-7119-fb10-1c7c-983809ebd4a6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.320562] env[62096]: DEBUG nova.compute.manager [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] Start building block device mappings for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 935.323416] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 8daeb33e54614e7889306aad65df8ca2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 935.356050] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8daeb33e54614e7889306aad65df8ca2 [ 935.510059] env[62096]: ERROR nova.scheduler.client.report [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [req-d363f726-0247-4187-ac9a-af78b389cdee] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 6eefe13c-ab55-4c03-987f-47a62756c3b3. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-d363f726-0247-4187-ac9a-af78b389cdee"}]}: nova.exception.PortBindingFailed: Binding failed for port 3e05661d-2e87-4072-aba5-687246e50404, please check neutron logs for more information. 
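The 409 above is the Placement API's optimistic-concurrency check in action: every inventory write carries the resource-provider generation the writer last saw, and a stale generation is rejected with "placement.concurrent_update" so the caller can refresh and retry, which is what the report client does in the records that follow (the successful retry later in this log bumps the provider generation from 104 to 105). A minimal sketch of that read-modify-write loop is below; the endpoint paths follow the public Placement REST API, but the endpoint URL, session handling, and retry budget are assumptions for illustration, not Nova's actual scheduler report client.

# Illustrative sketch of generation-guarded inventory updates against Placement.
# `session` is assumed to be a requests.Session already carrying X-Auth-Token
# and an OpenStack-API-Version header; the endpoint below is a placeholder.
import requests

PLACEMENT = "http://placement.example.test/placement"  # hypothetical endpoint


def set_inventory(session: requests.Session, rp_uuid: str, inventories: dict,
                  max_retries: int = 3) -> dict:
    """PUT the full inventory for a resource provider, retrying on 409."""
    for _ in range(max_retries):
        # Read the provider's current generation.
        rp = session.get(f"{PLACEMENT}/resource_providers/{rp_uuid}").json()
        body = {
            "resource_provider_generation": rp["generation"],
            "inventories": inventories,
        }
        resp = session.put(
            f"{PLACEMENT}/resource_providers/{rp_uuid}/inventories", json=body)
        if resp.status_code == 200:
            return resp.json()  # body includes the provider's new generation
        if resp.status_code == 409:
            # Another writer bumped the generation first
            # ("placement.concurrent_update"); refresh and try again.
            continue
        resp.raise_for_status()
    raise RuntimeError(f"gave up updating inventory for provider {rp_uuid}")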
[ 935.529674] env[62096]: DEBUG nova.scheduler.client.report [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Refreshing inventories for resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 935.542416] env[62096]: DEBUG nova.scheduler.client.report [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Updating ProviderTree inventory for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 935.542859] env[62096]: DEBUG nova.compute.provider_tree [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 935.559043] env[62096]: DEBUG nova.scheduler.client.report [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Refreshing aggregate associations for resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3, aggregates: None {{(pid=62096) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 935.579040] env[62096]: DEBUG nova.scheduler.client.report [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Refreshing trait associations for resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62096) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 935.676210] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0afcfb90-ea73-41dc-b8d7-87e2410c3d82 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.689525] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f80bb414-c871-4775-a691-89ffb5a502a4 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.698515] env[62096]: DEBUG oslo_vmware.api [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 
tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397467, 'name': Rename_Task, 'duration_secs': 0.15001} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.722288] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: dc451ce2-65de-4497-ad48-fd776f73cb80] Powering on the VM {{(pid=62096) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 935.722849] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e8f7271e-dbd9-4184-bdd0-9c45376d00c8 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.725029] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebccf239-0fc7-4e41-a48b-9189222f79d4 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.732052] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8986e4c6-87dd-4c9b-bc50-3cad332d77e8 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.736656] env[62096]: DEBUG oslo_vmware.api [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Waiting for the task: (returnval){ [ 935.736656] env[62096]: value = "task-397468" [ 935.736656] env[62096]: _type = "Task" [ 935.736656] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.750696] env[62096]: DEBUG nova.compute.provider_tree [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 935.751414] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg 2adb7cd73e0c4d37a0ccd24b2c43c628 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 935.757453] env[62096]: DEBUG oslo_vmware.api [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397468, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.759306] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2adb7cd73e0c4d37a0ccd24b2c43c628 [ 935.784722] env[62096]: DEBUG oslo_vmware.api [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]52e117a2-7119-fb10-1c7c-983809ebd4a6, 'name': SearchDatastore_Task, 'duration_secs': 0.022205} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.785064] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Releasing lock "[datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 935.785400] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk to [datastore2] 9f3ce570-878f-48bf-a08c-0387b0556785/9f3ce570-878f-48bf-a08c-0387b0556785.vmdk {{(pid=62096) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 935.785714] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ad3121bb-33c8-4f62-b1d4-cde1911dc342 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.792274] env[62096]: DEBUG oslo_vmware.api [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Waiting for the task: (returnval){ [ 935.792274] env[62096]: value = "task-397469" [ 935.792274] env[62096]: _type = "Task" [ 935.792274] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.806702] env[62096]: DEBUG oslo_vmware.api [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397469, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.829631] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 67b8f26b989842269ced5d6918c7964b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 935.860781] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 67b8f26b989842269ced5d6918c7964b [ 936.030672] env[62096]: DEBUG nova.compute.manager [req-2112f93d-82c9-49f1-a715-5d839bca83ac req-dd03fc90-e4bd-4e3d-84e8-0ff23f08a2bd service nova] [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] Received event network-changed-1d4e854d-9749-4e8b-9a8a-eb9a812ee623 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 936.030757] env[62096]: DEBUG nova.compute.manager [req-2112f93d-82c9-49f1-a715-5d839bca83ac req-dd03fc90-e4bd-4e3d-84e8-0ff23f08a2bd service nova] [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] Refreshing instance network info cache due to event network-changed-1d4e854d-9749-4e8b-9a8a-eb9a812ee623. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 936.031061] env[62096]: DEBUG oslo_concurrency.lockutils [req-2112f93d-82c9-49f1-a715-5d839bca83ac req-dd03fc90-e4bd-4e3d-84e8-0ff23f08a2bd service nova] Acquiring lock "refresh_cache-6b8a00ab-b6be-46ad-989d-81692d1d7556" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 936.031236] env[62096]: DEBUG oslo_concurrency.lockutils [req-2112f93d-82c9-49f1-a715-5d839bca83ac req-dd03fc90-e4bd-4e3d-84e8-0ff23f08a2bd service nova] Acquired lock "refresh_cache-6b8a00ab-b6be-46ad-989d-81692d1d7556" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.031406] env[62096]: DEBUG nova.network.neutron [req-2112f93d-82c9-49f1-a715-5d839bca83ac req-dd03fc90-e4bd-4e3d-84e8-0ff23f08a2bd service nova] [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] Refreshing network info cache for port 1d4e854d-9749-4e8b-9a8a-eb9a812ee623 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 936.031882] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-2112f93d-82c9-49f1-a715-5d839bca83ac req-dd03fc90-e4bd-4e3d-84e8-0ff23f08a2bd service nova] Expecting reply to msg 48f59bc230c349d1b4e0c6a942759836 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 936.039349] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 48f59bc230c349d1b4e0c6a942759836 [ 936.187731] env[62096]: ERROR nova.compute.manager [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 1d4e854d-9749-4e8b-9a8a-eb9a812ee623, please check neutron logs for more information. 
[ 936.187731] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 936.187731] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 936.187731] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 936.187731] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 936.187731] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 936.187731] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 936.187731] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 936.187731] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 936.187731] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 936.187731] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 936.187731] env[62096]: ERROR nova.compute.manager raise self.value [ 936.187731] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 936.187731] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 936.187731] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 936.187731] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 936.188440] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 936.188440] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 936.188440] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 1d4e854d-9749-4e8b-9a8a-eb9a812ee623, please check neutron logs for more information. 
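The traceback above bottoms out in _ensure_no_port_binding_failure(port), which turns a Neutron port whose binding failed into the PortBindingFailed error seen throughout this build. A simplified sketch of that kind of check follows; binding:vif_type is the standard Neutron port attribute, but the exception class and helper here are illustrative stand-ins, not a copy of the Nova source.

# Simplified sketch: reject a Neutron port whose binding failed.

class PortBindingFailed(Exception):
    def __init__(self, port_id: str):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")


def ensure_no_port_binding_failure(port: dict) -> None:
    # Neutron reports binding:vif_type == 'binding_failed' when no mechanism
    # driver could bind the port on the target host.
    if port.get("binding:vif_type") == "binding_failed":
        raise PortBindingFailed(port_id=port["id"])


# A trimmed port dict of the shape Neutron returns for a failed binding.
port = {"id": "1d4e854d-9749-4e8b-9a8a-eb9a812ee623",
        "binding:vif_type": "binding_failed"}
try:
    ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)  # Binding failed for port 1d4e854d-..., please check neutron logs ...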
[ 936.188440] env[62096]: ERROR nova.compute.manager [ 936.188440] env[62096]: Traceback (most recent call last): [ 936.188440] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 936.188440] env[62096]: listener.cb(fileno) [ 936.188440] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 936.188440] env[62096]: result = function(*args, **kwargs) [ 936.188440] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 936.188440] env[62096]: return func(*args, **kwargs) [ 936.188440] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 936.188440] env[62096]: raise e [ 936.188440] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 936.188440] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 936.188440] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 936.188440] env[62096]: created_port_ids = self._update_ports_for_instance( [ 936.188440] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 936.188440] env[62096]: with excutils.save_and_reraise_exception(): [ 936.188440] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 936.188440] env[62096]: self.force_reraise() [ 936.188440] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 936.188440] env[62096]: raise self.value [ 936.188440] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 936.188440] env[62096]: updated_port = self._update_port( [ 936.188440] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 936.188440] env[62096]: _ensure_no_port_binding_failure(port) [ 936.188440] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 936.188440] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 936.189362] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 1d4e854d-9749-4e8b-9a8a-eb9a812ee623, please check neutron logs for more information. [ 936.189362] env[62096]: Removing descriptor: 16 [ 936.247121] env[62096]: DEBUG oslo_vmware.api [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397468, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.282463] env[62096]: DEBUG nova.scheduler.client.report [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Updated inventory for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with generation 104 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:957}} [ 936.282743] env[62096]: DEBUG nova.compute.provider_tree [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Updating resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 generation from 104 to 105 during operation: update_inventory {{(pid=62096) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 936.282939] env[62096]: DEBUG nova.compute.provider_tree [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 936.285439] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg 633aba13ced542f6896a603edd8a4d3a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 936.302776] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 633aba13ced542f6896a603edd8a4d3a [ 936.303175] env[62096]: DEBUG oslo_vmware.api [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397469, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.424044} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.303406] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk to [datastore2] 9f3ce570-878f-48bf-a08c-0387b0556785/9f3ce570-878f-48bf-a08c-0387b0556785.vmdk {{(pid=62096) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 936.303626] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Extending root virtual disk to 1048576 {{(pid=62096) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 936.303842] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-122c9f78-32d9-425f-aef4-af24f3b4ff8a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.310320] env[62096]: DEBUG oslo_vmware.api [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Waiting for the task: (returnval){ [ 936.310320] env[62096]: value = "task-397470" [ 936.310320] env[62096]: _type = "Task" [ 936.310320] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.318174] env[62096]: DEBUG oslo_vmware.api [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397470, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.331808] env[62096]: DEBUG nova.compute.manager [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 936.357067] env[62096]: DEBUG nova.virt.hardware [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 936.357324] env[62096]: DEBUG nova.virt.hardware [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 936.357492] env[62096]: DEBUG nova.virt.hardware [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 936.357676] env[62096]: DEBUG nova.virt.hardware [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 936.357835] env[62096]: DEBUG nova.virt.hardware [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 936.357992] env[62096]: DEBUG nova.virt.hardware [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 936.358209] env[62096]: DEBUG nova.virt.hardware [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 936.358378] env[62096]: DEBUG nova.virt.hardware [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 936.358544] env[62096]: DEBUG 
nova.virt.hardware [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 936.358724] env[62096]: DEBUG nova.virt.hardware [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 936.358909] env[62096]: DEBUG nova.virt.hardware [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 936.359751] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1295f2cc-3227-4d4d-8eb7-e0389ab4b2a6 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.367720] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8949077f-8254-4c77-bc97-a83279f8dfff {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.380608] env[62096]: ERROR nova.compute.manager [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 1d4e854d-9749-4e8b-9a8a-eb9a812ee623, please check neutron logs for more information. 
[ 936.380608] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] Traceback (most recent call last): [ 936.380608] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 936.380608] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] yield resources [ 936.380608] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 936.380608] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] self.driver.spawn(context, instance, image_meta, [ 936.380608] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 936.380608] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] self._vmops.spawn(context, instance, image_meta, injected_files, [ 936.380608] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 936.380608] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] vm_ref = self.build_virtual_machine(instance, [ 936.380608] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 936.381042] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] vif_infos = vmwarevif.get_vif_info(self._session, [ 936.381042] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 936.381042] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] for vif in network_info: [ 936.381042] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 936.381042] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] return self._sync_wrapper(fn, *args, **kwargs) [ 936.381042] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 936.381042] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] self.wait() [ 936.381042] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 936.381042] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] self[:] = self._gt.wait() [ 936.381042] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 936.381042] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] return self._exit_event.wait() [ 936.381042] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 936.381042] env[62096]: ERROR 
nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] current.throw(*self._exc) [ 936.381460] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 936.381460] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] result = function(*args, **kwargs) [ 936.381460] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 936.381460] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] return func(*args, **kwargs) [ 936.381460] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 936.381460] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] raise e [ 936.381460] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 936.381460] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] nwinfo = self.network_api.allocate_for_instance( [ 936.381460] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 936.381460] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] created_port_ids = self._update_ports_for_instance( [ 936.381460] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 936.381460] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] with excutils.save_and_reraise_exception(): [ 936.381460] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 936.381859] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] self.force_reraise() [ 936.381859] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 936.381859] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] raise self.value [ 936.381859] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 936.381859] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] updated_port = self._update_port( [ 936.381859] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 936.381859] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] _ensure_no_port_binding_failure(port) [ 936.381859] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
936.381859] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] raise exception.PortBindingFailed(port_id=port['id']) [ 936.381859] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] nova.exception.PortBindingFailed: Binding failed for port 1d4e854d-9749-4e8b-9a8a-eb9a812ee623, please check neutron logs for more information. [ 936.381859] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] [ 936.381859] env[62096]: INFO nova.compute.manager [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] Terminating instance [ 936.382904] env[62096]: DEBUG oslo_concurrency.lockutils [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Acquiring lock "refresh_cache-6b8a00ab-b6be-46ad-989d-81692d1d7556" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 936.548778] env[62096]: DEBUG nova.network.neutron [req-2112f93d-82c9-49f1-a715-5d839bca83ac req-dd03fc90-e4bd-4e3d-84e8-0ff23f08a2bd service nova] [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 936.627586] env[62096]: DEBUG nova.network.neutron [req-2112f93d-82c9-49f1-a715-5d839bca83ac req-dd03fc90-e4bd-4e3d-84e8-0ff23f08a2bd service nova] [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 936.628103] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-2112f93d-82c9-49f1-a715-5d839bca83ac req-dd03fc90-e4bd-4e3d-84e8-0ff23f08a2bd service nova] Expecting reply to msg df0e1b2923b44e29ad233d5950ca2866 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 936.636217] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg df0e1b2923b44e29ad233d5950ca2866 [ 936.747659] env[62096]: DEBUG oslo_vmware.api [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397468, 'name': PowerOnVM_Task, 'duration_secs': 0.639019} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.747934] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: dc451ce2-65de-4497-ad48-fd776f73cb80] Powered on the VM {{(pid=62096) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 936.748151] env[62096]: INFO nova.compute.manager [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: dc451ce2-65de-4497-ad48-fd776f73cb80] Took 4.84 seconds to spawn the instance on the hypervisor. 
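The PowerOnVM_Task records above ("progress is 33%" ... "progress is 88%" ... "completed successfully") show the polling loop oslo.vmware runs for every asynchronous vCenter task: invoke the SOAP method, get a task reference back, then poll it until it succeeds or errors. The sketch below shows the same pattern from a consumer's point of view; the vCenter host, credentials, and the way the VM reference is looked up are placeholders rather than values from this deployment, and the constructor arguments are trimmed to the essentials.

# Sketch of the oslo.vmware invoke/poll pattern seen in the records above.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

# Placeholder endpoint and credentials; create_session=True (the default)
# logs in immediately, like the _create_session records at the top of this log.
session = vmware_api.VMwareAPISession(
    "vcenter.example.test",
    "administrator@vsphere.local",
    "secret",
    api_retry_count=10,
    task_poll_interval=0.5)

# Grab some VirtualMachine managed-object reference (illustrative only; Nova
# resolves the instance's moref through its own vm_util helpers instead).
result = session.invoke_api(vim_util, "get_objects", session.vim,
                            "VirtualMachine", 100)
vm_ref = result.objects[0].obj

# Start the asynchronous vCenter task ...
task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
# ... and block until it finishes; wait_for_task() is what emits the
# "Waiting for the task" / "progress is N%" records and raises on task error.
session.wait_for_task(task)
session.logout()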
[ 936.748328] env[62096]: DEBUG nova.compute.manager [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: dc451ce2-65de-4497-ad48-fd776f73cb80] Checking state {{(pid=62096) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 936.749091] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-730f7bdc-3e62-4238-9592-b4b852b3d3f7 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.758690] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg bc90d63433814846b4454e51762dbb06 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 936.788191] env[62096]: DEBUG oslo_concurrency.lockutils [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.478s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 936.788809] env[62096]: ERROR nova.compute.manager [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 8802af61-3692-4e27-85f4-945afccff231] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 3e05661d-2e87-4072-aba5-687246e50404, please check neutron logs for more information. 
[ 936.788809] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] Traceback (most recent call last): [ 936.788809] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 936.788809] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] self.driver.spawn(context, instance, image_meta, [ 936.788809] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 936.788809] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] self._vmops.spawn(context, instance, image_meta, injected_files, [ 936.788809] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 936.788809] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] vm_ref = self.build_virtual_machine(instance, [ 936.788809] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 936.788809] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] vif_infos = vmwarevif.get_vif_info(self._session, [ 936.788809] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 936.789139] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] for vif in network_info: [ 936.789139] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 936.789139] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] return self._sync_wrapper(fn, *args, **kwargs) [ 936.789139] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 936.789139] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] self.wait() [ 936.789139] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 936.789139] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] self[:] = self._gt.wait() [ 936.789139] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 936.789139] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] return self._exit_event.wait() [ 936.789139] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 936.789139] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] current.throw(*self._exc) [ 936.789139] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
936.789139] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] result = function(*args, **kwargs) [ 936.789484] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 936.789484] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] return func(*args, **kwargs) [ 936.789484] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 936.789484] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] raise e [ 936.789484] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 936.789484] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] nwinfo = self.network_api.allocate_for_instance( [ 936.789484] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 936.789484] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] created_port_ids = self._update_ports_for_instance( [ 936.789484] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 936.789484] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] with excutils.save_and_reraise_exception(): [ 936.789484] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 936.789484] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] self.force_reraise() [ 936.789484] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 936.789821] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] raise self.value [ 936.789821] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 936.789821] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] updated_port = self._update_port( [ 936.789821] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 936.789821] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] _ensure_no_port_binding_failure(port) [ 936.789821] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 936.789821] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] raise exception.PortBindingFailed(port_id=port['id']) [ 936.789821] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] nova.exception.PortBindingFailed: Binding failed for 
port 3e05661d-2e87-4072-aba5-687246e50404, please check neutron logs for more information. [ 936.789821] env[62096]: ERROR nova.compute.manager [instance: 8802af61-3692-4e27-85f4-945afccff231] [ 936.789821] env[62096]: DEBUG nova.compute.utils [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 8802af61-3692-4e27-85f4-945afccff231] Binding failed for port 3e05661d-2e87-4072-aba5-687246e50404, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 936.790679] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.860s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 936.792106] env[62096]: INFO nova.compute.claims [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 936.793661] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 46037948a2004bce9b60339c33913a68 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 936.794869] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bc90d63433814846b4454e51762dbb06 [ 936.795407] env[62096]: DEBUG nova.compute.manager [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 8802af61-3692-4e27-85f4-945afccff231] Build of instance 8802af61-3692-4e27-85f4-945afccff231 was re-scheduled: Binding failed for port 3e05661d-2e87-4072-aba5-687246e50404, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 936.796023] env[62096]: DEBUG nova.compute.manager [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 8802af61-3692-4e27-85f4-945afccff231] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 936.796118] env[62096]: DEBUG oslo_concurrency.lockutils [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Acquiring lock "refresh_cache-8802af61-3692-4e27-85f4-945afccff231" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 936.796205] env[62096]: DEBUG oslo_concurrency.lockutils [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Acquired lock "refresh_cache-8802af61-3692-4e27-85f4-945afccff231" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.796371] env[62096]: DEBUG nova.network.neutron [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 8802af61-3692-4e27-85f4-945afccff231] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 936.796739] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg 5c482fe2280d4589af0e658200158419 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 936.802394] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5c482fe2280d4589af0e658200158419 [ 936.818840] env[62096]: DEBUG oslo_vmware.api [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397470, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059761} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.819353] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Extended root virtual disk {{(pid=62096) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 936.820129] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da7961a9-8fe9-4b0f-806d-94e4e4cb983c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.839562] env[62096]: DEBUG nova.virt.vmwareapi.volumeops [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Reconfiguring VM instance instance-00000055 to attach disk [datastore2] 9f3ce570-878f-48bf-a08c-0387b0556785/9f3ce570-878f-48bf-a08c-0387b0556785.vmdk or device None with type sparse {{(pid=62096) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 936.840213] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 46037948a2004bce9b60339c33913a68 [ 936.840577] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ef561b51-6619-41f0-b1da-68f44365ef7e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.859885] env[62096]: DEBUG oslo_vmware.api [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Waiting for the task: (returnval){ [ 936.859885] env[62096]: value = "task-397471" [ 936.859885] env[62096]: _type = "Task" [ 936.859885] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.867441] env[62096]: DEBUG oslo_vmware.api [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397471, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.131512] env[62096]: DEBUG oslo_concurrency.lockutils [req-2112f93d-82c9-49f1-a715-5d839bca83ac req-dd03fc90-e4bd-4e3d-84e8-0ff23f08a2bd service nova] Releasing lock "refresh_cache-6b8a00ab-b6be-46ad-989d-81692d1d7556" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 937.131933] env[62096]: DEBUG oslo_concurrency.lockutils [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Acquired lock "refresh_cache-6b8a00ab-b6be-46ad-989d-81692d1d7556" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.132163] env[62096]: DEBUG nova.network.neutron [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 937.132610] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 0ce64287a05f45e5a53aed4b6aa2935c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 937.139670] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0ce64287a05f45e5a53aed4b6aa2935c [ 937.268446] env[62096]: INFO nova.compute.manager [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: dc451ce2-65de-4497-ad48-fd776f73cb80] Took 25.88 seconds to build instance. [ 937.268798] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg 6eb03451134a44ec9741c10ef7382d0b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 937.281694] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6eb03451134a44ec9741c10ef7382d0b [ 937.299353] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 6cfad738d2984c53920efe40616c67d9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 937.307494] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6cfad738d2984c53920efe40616c67d9 [ 937.315368] env[62096]: DEBUG nova.network.neutron [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 8802af61-3692-4e27-85f4-945afccff231] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 937.371965] env[62096]: DEBUG oslo_vmware.api [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397471, 'name': ReconfigVM_Task, 'duration_secs': 0.254285} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.372460] env[62096]: DEBUG nova.virt.vmwareapi.volumeops [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Reconfigured VM instance instance-00000055 to attach disk [datastore2] 9f3ce570-878f-48bf-a08c-0387b0556785/9f3ce570-878f-48bf-a08c-0387b0556785.vmdk or device None with type sparse {{(pid=62096) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 937.373189] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bfaaf6fc-ef26-4cb9-ac5e-e941879f0b9c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.379489] env[62096]: DEBUG oslo_vmware.api [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Waiting for the task: (returnval){ [ 937.379489] env[62096]: value = "task-397472" [ 937.379489] env[62096]: _type = "Task" [ 937.379489] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.387173] env[62096]: DEBUG oslo_vmware.api [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397472, 'name': Rename_Task} progress is 5%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.400255] env[62096]: DEBUG nova.network.neutron [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 8802af61-3692-4e27-85f4-945afccff231] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.400771] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg 291ac4492af04647bde47302e3e157e1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 937.408698] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 291ac4492af04647bde47302e3e157e1 [ 937.647936] env[62096]: DEBUG nova.network.neutron [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 937.711043] env[62096]: DEBUG nova.network.neutron [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.711635] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 9a1c11e700e04eb68f748041c8f4ebb2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 937.719884] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9a1c11e700e04eb68f748041c8f4ebb2 [ 937.771265] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cad51331-71f6-4082-9e97-6ae9fadbb222 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Lock "dc451ce2-65de-4497-ad48-fd776f73cb80" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.224s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 937.891396] env[62096]: DEBUG oslo_vmware.api [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397472, 'name': Rename_Task, 'duration_secs': 0.132528} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.892278] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Powering on the VM {{(pid=62096) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 937.892278] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-375a2d49-ce99-4618-8192-cb1ced3af454 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.897365] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c64d49e-64c4-404b-afb3-0ae1b411ded2 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.900825] env[62096]: DEBUG oslo_vmware.api [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Waiting for the task: (returnval){ [ 937.900825] env[62096]: value = "task-397473" [ 937.900825] env[62096]: _type = "Task" [ 937.900825] env[62096]: } to complete. 
{{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.905845] env[62096]: DEBUG oslo_concurrency.lockutils [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Releasing lock "refresh_cache-8802af61-3692-4e27-85f4-945afccff231" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 937.906064] env[62096]: DEBUG nova.compute.manager [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 937.906247] env[62096]: DEBUG nova.compute.manager [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 8802af61-3692-4e27-85f4-945afccff231] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 937.906414] env[62096]: DEBUG nova.network.neutron [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 8802af61-3692-4e27-85f4-945afccff231] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 937.909650] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7996527-7336-481a-b8a0-664ae1c4c347 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.916644] env[62096]: DEBUG oslo_vmware.api [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397473, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.942172] env[62096]: DEBUG nova.network.neutron [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 8802af61-3692-4e27-85f4-945afccff231] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 937.942769] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg 14b93137de1a46a589b7186ca8aab9e9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 937.944116] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d34ec3d-dbad-4fdb-b360-f03d2e048d3a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.951311] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bf02156-1ea9-472c-8f2c-02630976c84a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.955414] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 14b93137de1a46a589b7186ca8aab9e9 [ 937.964871] env[62096]: DEBUG nova.compute.provider_tree [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 937.965366] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 4ad0c6094f4d478d8953a47a5a185152 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 937.975567] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4ad0c6094f4d478d8953a47a5a185152 [ 938.055450] env[62096]: DEBUG nova.compute.manager [req-755f5159-d66a-4cbf-94d2-c856ca65e917 req-c6fc56be-d9d6-4b10-b44d-9b4e94954bba service nova] [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] Received event network-vif-deleted-1d4e854d-9749-4e8b-9a8a-eb9a812ee623 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 938.214259] env[62096]: DEBUG oslo_concurrency.lockutils [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Releasing lock "refresh_cache-6b8a00ab-b6be-46ad-989d-81692d1d7556" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 938.214614] env[62096]: DEBUG nova.compute.manager [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 938.214807] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 938.215116] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-02048e6c-3840-4961-9b28-5762df8ae08f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.224216] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41985f83-1be3-40f8-b333-4d6504b9f29c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.245701] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6b8a00ab-b6be-46ad-989d-81692d1d7556 could not be found. [ 938.245902] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 938.246081] env[62096]: INFO nova.compute.manager [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] Took 0.03 seconds to destroy the instance on the hypervisor. [ 938.246341] env[62096]: DEBUG oslo.service.loopingcall [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 938.246572] env[62096]: DEBUG nova.compute.manager [-] [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 938.246672] env[62096]: DEBUG nova.network.neutron [-] [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 938.352723] env[62096]: DEBUG nova.network.neutron [-] [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 938.353259] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 9a5b129bb6084fe1819117a6efb99d77 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 938.359882] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9a5b129bb6084fe1819117a6efb99d77 [ 938.410704] env[62096]: DEBUG oslo_vmware.api [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397473, 'name': PowerOnVM_Task, 'duration_secs': 0.397656} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.410962] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Powered on the VM {{(pid=62096) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 938.411157] env[62096]: INFO nova.compute.manager [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Took 4.28 seconds to spawn the instance on the hypervisor. [ 938.411334] env[62096]: DEBUG nova.compute.manager [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Checking state {{(pid=62096) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 938.412090] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56038d1e-874b-4e65-9771-81a488d2db77 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.419439] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg 325bfa19777b4cefbeb44a7daf467e14 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 938.447595] env[62096]: DEBUG nova.network.neutron [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 8802af61-3692-4e27-85f4-945afccff231] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 938.448063] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg 17ebdaeae8c8443bbf2715c90eb2f4b1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 938.449147] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 325bfa19777b4cefbeb44a7daf467e14 [ 938.455539] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 17ebdaeae8c8443bbf2715c90eb2f4b1 [ 938.468564] env[62096]: DEBUG nova.scheduler.client.report [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Inventory has not changed for provider 
6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 938.470916] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 73508eafdc0649248c483e50f7e1e446 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 938.482168] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 73508eafdc0649248c483e50f7e1e446 [ 938.857349] env[62096]: DEBUG nova.network.neutron [-] [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 938.857825] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 6e246496c57c42069d5daaa5c12c87f6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 938.867363] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6e246496c57c42069d5daaa5c12c87f6 [ 938.928645] env[62096]: INFO nova.compute.manager [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Took 25.80 seconds to build instance. [ 938.928831] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg 87ae5f1f8dcf45719f9b5ab4ade2fd66 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 938.939156] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 87ae5f1f8dcf45719f9b5ab4ade2fd66 [ 938.950903] env[62096]: INFO nova.compute.manager [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] [instance: 8802af61-3692-4e27-85f4-945afccff231] Took 1.04 seconds to deallocate network for instance. [ 938.952855] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg 0b4b0bcf85854856b6e28db67dbcbef0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 938.973239] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.182s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 938.973741] env[62096]: DEBUG nova.compute.manager [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 938.975455] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 737f34ed36754be9a64efb4c0ce3bade in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 938.976995] env[62096]: DEBUG oslo_concurrency.lockutils [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.900s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 938.979443] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 6638ad792af048539ecf890187bd9451 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 938.996855] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0b4b0bcf85854856b6e28db67dbcbef0 [ 939.006385] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 737f34ed36754be9a64efb4c0ce3bade [ 939.014868] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6638ad792af048539ecf890187bd9451 [ 939.361293] env[62096]: INFO nova.compute.manager [-] [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] Took 1.11 seconds to deallocate network for instance. [ 939.362907] env[62096]: DEBUG nova.compute.claims [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 939.363092] env[62096]: DEBUG oslo_concurrency.lockutils [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 939.430851] env[62096]: DEBUG oslo_concurrency.lockutils [None req-5865a84c-0be2-46e8-ab17-dbd444d3f325 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Lock "9f3ce570-878f-48bf-a08c-0387b0556785" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.713s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.457809] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg d98fb16aa4c04969bf33a31c8f938987 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 939.478689] env[62096]: DEBUG nova.compute.utils [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 939.479304] env[62096]: INFO 
oslo_messaging._drivers.amqpdriver [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg c6fbab83b8b548859d683e06c29be360 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 939.480271] env[62096]: DEBUG nova.compute.manager [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 939.480433] env[62096]: DEBUG nova.network.neutron [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 939.494218] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c6fbab83b8b548859d683e06c29be360 [ 939.496253] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d98fb16aa4c04969bf33a31c8f938987 [ 939.522330] env[62096]: DEBUG nova.policy [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eccf4b6b1d2b496796ad12d6caad16ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '53edc9a849714cedab5fcd7b03ca6916', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 939.554905] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg 27492de6e352441d8b8a92ef096b4194 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 939.567268] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 27492de6e352441d8b8a92ef096b4194 [ 939.574224] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e131f2f8-7b55-4e9a-89fe-5aec76ad63a4 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.582473] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ac4e95f-4c7f-4f9e-a615-1ccec322206e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.613180] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cccb6d88-175c-40db-b9a2-859272bf1f02 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.620310] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02871a55-dff2-470d-affc-50d1777244e6 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.633460] env[62096]: DEBUG nova.compute.provider_tree [None 
req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 939.633973] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg b0608a4502f0454d9501f6c045c42a5a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 939.641157] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b0608a4502f0454d9501f6c045c42a5a [ 939.807739] env[62096]: DEBUG nova.network.neutron [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] Successfully created port: 6111dbe7-582d-42af-9aeb-ca86af1a7404 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 939.983748] env[62096]: INFO nova.scheduler.client.report [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Deleted allocations for instance 8802af61-3692-4e27-85f4-945afccff231 [ 939.998046] env[62096]: DEBUG nova.compute.manager [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] Start building block device mappings for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 939.998046] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg b284ea74b7684e0cbf2850b9e0ced460 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 939.998046] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Expecting reply to msg fbf80e77e5d94baca38af7e9c269eb40 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 940.030262] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fbf80e77e5d94baca38af7e9c269eb40 [ 940.033043] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b284ea74b7684e0cbf2850b9e0ced460 [ 940.057725] env[62096]: INFO nova.compute.manager [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Rebuilding instance [ 940.100262] env[62096]: DEBUG nova.compute.manager [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Checking state {{(pid=62096) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 940.101075] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00bec46b-44a1-46e3-886e-cdd3099b98bf {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.108937] env[62096]: INFO 
oslo_messaging._drivers.amqpdriver [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg a8784c1986fc486fa10d33f862785b30 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 940.136882] env[62096]: DEBUG nova.scheduler.client.report [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 940.139127] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 566c75c702f448cda043c001a084600d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 940.140150] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a8784c1986fc486fa10d33f862785b30 [ 940.152116] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 566c75c702f448cda043c001a084600d [ 940.497041] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg ee5ad157852e489882bf4cfdc7401e3a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 940.498152] env[62096]: DEBUG oslo_concurrency.lockutils [None req-064fa4d0-df95-4930-985c-d30de278cda0 tempest-AttachVolumeNegativeTest-1077674641 tempest-AttachVolumeNegativeTest-1077674641-project-member] Lock "8802af61-3692-4e27-85f4-945afccff231" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.601s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.530468] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ee5ad157852e489882bf4cfdc7401e3a [ 940.611194] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Powering off the VM {{(pid=62096) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 940.611765] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2c7fb7d2-246d-4b8d-8e42-ff6b08afe9d9 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.618516] env[62096]: DEBUG oslo_vmware.api [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Waiting for the task: (returnval){ [ 940.618516] env[62096]: value = "task-397474" [ 940.618516] env[62096]: _type = "Task" [ 940.618516] env[62096]: } to complete. 
{{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.626435] env[62096]: DEBUG oslo_vmware.api [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397474, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.641374] env[62096]: DEBUG oslo_concurrency.lockutils [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.664s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.642036] env[62096]: ERROR nova.compute.manager [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 809ee1cb-7807-4bd3-b19d-13bbe996c738, please check neutron logs for more information. [ 940.642036] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] Traceback (most recent call last): [ 940.642036] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 940.642036] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] self.driver.spawn(context, instance, image_meta, [ 940.642036] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 940.642036] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 940.642036] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 940.642036] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] vm_ref = self.build_virtual_machine(instance, [ 940.642036] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 940.642036] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] vif_infos = vmwarevif.get_vif_info(self._session, [ 940.642036] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 940.642473] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] for vif in network_info: [ 940.642473] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 940.642473] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] return self._sync_wrapper(fn, *args, **kwargs) [ 940.642473] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] File 
"/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 940.642473] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] self.wait() [ 940.642473] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 940.642473] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] self[:] = self._gt.wait() [ 940.642473] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 940.642473] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] return self._exit_event.wait() [ 940.642473] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 940.642473] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] current.throw(*self._exc) [ 940.642473] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 940.642473] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] result = function(*args, **kwargs) [ 940.642942] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 940.642942] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] return func(*args, **kwargs) [ 940.642942] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 940.642942] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] raise e [ 940.642942] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 940.642942] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] nwinfo = self.network_api.allocate_for_instance( [ 940.642942] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 940.642942] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] created_port_ids = self._update_ports_for_instance( [ 940.642942] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 940.642942] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] with excutils.save_and_reraise_exception(): [ 940.642942] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 940.642942] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] self.force_reraise() [ 940.642942] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 940.643380] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] raise self.value [ 940.643380] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 940.643380] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] updated_port = self._update_port( [ 940.643380] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 940.643380] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] _ensure_no_port_binding_failure(port) [ 940.643380] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 940.643380] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] raise exception.PortBindingFailed(port_id=port['id']) [ 940.643380] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] nova.exception.PortBindingFailed: Binding failed for port 809ee1cb-7807-4bd3-b19d-13bbe996c738, please check neutron logs for more information. [ 940.643380] env[62096]: ERROR nova.compute.manager [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] [ 940.643380] env[62096]: DEBUG nova.compute.utils [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] Binding failed for port 809ee1cb-7807-4bd3-b19d-13bbe996c738, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 940.643937] env[62096]: DEBUG oslo_concurrency.lockutils [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 11.517s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.649173] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 52e0f591d7a34ad9b6d6c66439539ac1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 940.649173] env[62096]: DEBUG nova.compute.manager [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] Build of instance 0f275603-acfc-43db-8a71-a17af8e837b4 was re-scheduled: Binding failed for port 809ee1cb-7807-4bd3-b19d-13bbe996c738, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 940.649173] env[62096]: DEBUG nova.compute.manager [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 940.649173] env[62096]: DEBUG oslo_concurrency.lockutils [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Acquiring lock "refresh_cache-0f275603-acfc-43db-8a71-a17af8e837b4" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 940.649173] env[62096]: DEBUG oslo_concurrency.lockutils [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Acquired lock "refresh_cache-0f275603-acfc-43db-8a71-a17af8e837b4" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.649883] env[62096]: DEBUG nova.network.neutron [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 940.649883] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 0af520ed06e94bdf82aef30665c89caf in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 940.659427] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0af520ed06e94bdf82aef30665c89caf [ 940.687849] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 52e0f591d7a34ad9b6d6c66439539ac1 [ 940.764658] env[62096]: DEBUG nova.compute.manager [req-09e5a0ff-92f7-4294-86f8-2871dae068eb req-6f8dfd34-7d06-4495-937d-8089599e7c2a service nova] [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] Received event network-changed-6111dbe7-582d-42af-9aeb-ca86af1a7404 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 940.764758] env[62096]: DEBUG nova.compute.manager [req-09e5a0ff-92f7-4294-86f8-2871dae068eb req-6f8dfd34-7d06-4495-937d-8089599e7c2a service nova] [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] Refreshing instance network info cache due to event network-changed-6111dbe7-582d-42af-9aeb-ca86af1a7404. 
{{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 940.764900] env[62096]: DEBUG oslo_concurrency.lockutils [req-09e5a0ff-92f7-4294-86f8-2871dae068eb req-6f8dfd34-7d06-4495-937d-8089599e7c2a service nova] Acquiring lock "refresh_cache-a960bdaa-ed46-4f5e-ad1a-56b06589c362" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 940.765055] env[62096]: DEBUG oslo_concurrency.lockutils [req-09e5a0ff-92f7-4294-86f8-2871dae068eb req-6f8dfd34-7d06-4495-937d-8089599e7c2a service nova] Acquired lock "refresh_cache-a960bdaa-ed46-4f5e-ad1a-56b06589c362" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.765677] env[62096]: DEBUG nova.network.neutron [req-09e5a0ff-92f7-4294-86f8-2871dae068eb req-6f8dfd34-7d06-4495-937d-8089599e7c2a service nova] [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] Refreshing network info cache for port 6111dbe7-582d-42af-9aeb-ca86af1a7404 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 940.766217] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-09e5a0ff-92f7-4294-86f8-2871dae068eb req-6f8dfd34-7d06-4495-937d-8089599e7c2a service nova] Expecting reply to msg d6ba0533a3e14ac28dd7281ec28718b1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 940.773062] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d6ba0533a3e14ac28dd7281ec28718b1 [ 940.940300] env[62096]: ERROR nova.compute.manager [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 6111dbe7-582d-42af-9aeb-ca86af1a7404, please check neutron logs for more information. 
[ 940.940300] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 940.940300] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 940.940300] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 940.940300] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 940.940300] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 940.940300] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 940.940300] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 940.940300] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 940.940300] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 940.940300] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 940.940300] env[62096]: ERROR nova.compute.manager raise self.value [ 940.940300] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 940.940300] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 940.940300] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 940.940300] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 940.940799] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 940.940799] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 940.940799] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 6111dbe7-582d-42af-9aeb-ca86af1a7404, please check neutron logs for more information. 
[ 940.940799] env[62096]: ERROR nova.compute.manager [ 940.940799] env[62096]: Traceback (most recent call last): [ 940.940799] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 940.940799] env[62096]: listener.cb(fileno) [ 940.940799] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 940.940799] env[62096]: result = function(*args, **kwargs) [ 940.940799] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 940.940799] env[62096]: return func(*args, **kwargs) [ 940.940799] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 940.940799] env[62096]: raise e [ 940.940799] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 940.940799] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 940.940799] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 940.940799] env[62096]: created_port_ids = self._update_ports_for_instance( [ 940.940799] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 940.940799] env[62096]: with excutils.save_and_reraise_exception(): [ 940.940799] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 940.940799] env[62096]: self.force_reraise() [ 940.940799] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 940.940799] env[62096]: raise self.value [ 940.940799] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 940.940799] env[62096]: updated_port = self._update_port( [ 940.940799] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 940.940799] env[62096]: _ensure_no_port_binding_failure(port) [ 940.940799] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 940.940799] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 940.941635] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 6111dbe7-582d-42af-9aeb-ca86af1a7404, please check neutron logs for more information. [ 940.941635] env[62096]: Removing descriptor: 16 [ 941.000439] env[62096]: DEBUG nova.compute.manager [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 941.023829] env[62096]: DEBUG nova.virt.hardware [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 941.024171] env[62096]: DEBUG nova.virt.hardware [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 941.024369] env[62096]: DEBUG nova.virt.hardware [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 941.024554] env[62096]: DEBUG nova.virt.hardware [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 941.024719] env[62096]: DEBUG nova.virt.hardware [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 941.025101] env[62096]: DEBUG nova.virt.hardware [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 941.025347] env[62096]: DEBUG nova.virt.hardware [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 941.025566] env[62096]: DEBUG nova.virt.hardware [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 941.025860] env[62096]: DEBUG nova.virt.hardware [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Got 1 possible 
topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 941.026098] env[62096]: DEBUG nova.virt.hardware [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 941.026598] env[62096]: DEBUG nova.virt.hardware [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 941.027262] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f46366f1-16ed-4096-bff2-63411ec06161 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.039523] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-166cf060-ce6c-4d77-95ec-db2b4d6cdc13 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.055814] env[62096]: ERROR nova.compute.manager [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 6111dbe7-582d-42af-9aeb-ca86af1a7404, please check neutron logs for more information. [ 941.055814] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] Traceback (most recent call last): [ 941.055814] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 941.055814] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] yield resources [ 941.055814] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 941.055814] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] self.driver.spawn(context, instance, image_meta, [ 941.055814] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 941.055814] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] self._vmops.spawn(context, instance, image_meta, injected_files, [ 941.055814] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 941.055814] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] vm_ref = self.build_virtual_machine(instance, [ 941.055814] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 941.056270] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] vif_infos = vmwarevif.get_vif_info(self._session, [ 941.056270] env[62096]: ERROR nova.compute.manager 
[instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 941.056270] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] for vif in network_info: [ 941.056270] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 941.056270] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] return self._sync_wrapper(fn, *args, **kwargs) [ 941.056270] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 941.056270] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] self.wait() [ 941.056270] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 941.056270] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] self[:] = self._gt.wait() [ 941.056270] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 941.056270] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] return self._exit_event.wait() [ 941.056270] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 941.056270] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] current.throw(*self._exc) [ 941.056675] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 941.056675] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] result = function(*args, **kwargs) [ 941.056675] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 941.056675] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] return func(*args, **kwargs) [ 941.056675] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 941.056675] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] raise e [ 941.056675] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 941.056675] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] nwinfo = self.network_api.allocate_for_instance( [ 941.056675] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 941.056675] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] created_port_ids = self._update_ports_for_instance( [ 941.056675] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] File 
"/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 941.056675] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] with excutils.save_and_reraise_exception(): [ 941.056675] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 941.057072] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] self.force_reraise() [ 941.057072] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 941.057072] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] raise self.value [ 941.057072] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 941.057072] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] updated_port = self._update_port( [ 941.057072] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 941.057072] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] _ensure_no_port_binding_failure(port) [ 941.057072] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 941.057072] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] raise exception.PortBindingFailed(port_id=port['id']) [ 941.057072] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] nova.exception.PortBindingFailed: Binding failed for port 6111dbe7-582d-42af-9aeb-ca86af1a7404, please check neutron logs for more information. [ 941.057072] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] [ 941.057072] env[62096]: INFO nova.compute.manager [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] Terminating instance [ 941.058543] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Acquiring lock "refresh_cache-a960bdaa-ed46-4f5e-ad1a-56b06589c362" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 941.128234] env[62096]: DEBUG oslo_vmware.api [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397474, 'name': PowerOffVM_Task, 'duration_secs': 0.12209} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.128582] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Powered off the VM {{(pid=62096) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 941.128804] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 941.129569] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4822d92c-4e99-4a1e-9bef-704c697b8297 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.136530] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Unregistering the VM {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 941.136765] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-91998879-8f33-49a8-bfad-c150abb9301a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.162954] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Unregistered the VM {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 941.163166] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Deleting contents of the VM from datastore datastore2 {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 941.163339] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Deleting the datastore file [datastore2] 9f3ce570-878f-48bf-a08c-0387b0556785 {{(pid=62096) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 941.163869] env[62096]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5a90653e-e8a4-4784-b061-152413c9e662 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.170161] env[62096]: DEBUG oslo_vmware.api [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Waiting for the task: (returnval){ [ 941.170161] env[62096]: value = "task-397476" [ 941.170161] env[62096]: _type = "Task" [ 941.170161] env[62096]: } to complete. 
{{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.179885] env[62096]: DEBUG oslo_vmware.api [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397476, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.184245] env[62096]: DEBUG nova.network.neutron [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 941.266402] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ec1465c-3a8e-4924-98eb-8001e79f3dd6 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.276052] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7d6809c-facb-4509-8b20-6fb5b61cf82f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.305648] env[62096]: DEBUG nova.network.neutron [req-09e5a0ff-92f7-4294-86f8-2871dae068eb req-6f8dfd34-7d06-4495-937d-8089599e7c2a service nova] [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 941.312688] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56814c03-6557-4d8f-9451-0efb818cd219 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.312688] env[62096]: DEBUG nova.network.neutron [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.312688] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 9b68c011ca654ec1bd504906d826a7af in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 941.316187] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cb321a8-5705-4641-a734-84f3c6609fa1 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.320193] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9b68c011ca654ec1bd504906d826a7af [ 941.329793] env[62096]: DEBUG nova.compute.provider_tree [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 941.330255] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None 
req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 1f985c9b1b574cd38b613a79c92109be in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 941.338027] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1f985c9b1b574cd38b613a79c92109be [ 941.391852] env[62096]: DEBUG nova.network.neutron [req-09e5a0ff-92f7-4294-86f8-2871dae068eb req-6f8dfd34-7d06-4495-937d-8089599e7c2a service nova] [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.392454] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-09e5a0ff-92f7-4294-86f8-2871dae068eb req-6f8dfd34-7d06-4495-937d-8089599e7c2a service nova] Expecting reply to msg ace9f926c69a4b71a03fd3aced545655 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 941.400680] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ace9f926c69a4b71a03fd3aced545655 [ 941.679333] env[62096]: DEBUG oslo_vmware.api [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397476, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.093601} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.679685] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Deleted the datastore file {{(pid=62096) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 941.679739] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Deleted contents of the VM from datastore datastore2 {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 941.679911] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 941.681496] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg 1318ab1d74e34ad2820a6dd4d4f398df in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 941.715508] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1318ab1d74e34ad2820a6dd4d4f398df [ 941.812671] env[62096]: DEBUG oslo_concurrency.lockutils [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Releasing lock "refresh_cache-0f275603-acfc-43db-8a71-a17af8e837b4" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 941.812806] env[62096]: DEBUG nova.compute.manager [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Virt driver does 
not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 941.812998] env[62096]: DEBUG nova.compute.manager [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 941.813164] env[62096]: DEBUG nova.network.neutron [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 941.832868] env[62096]: DEBUG nova.scheduler.client.report [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 941.834917] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 813085a7f6334a369dbe56a86aa50869 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 941.842449] env[62096]: DEBUG nova.network.neutron [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 941.842449] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg b6dd9f5017f54e34abae9a3438ed0ebb in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 941.847171] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b6dd9f5017f54e34abae9a3438ed0ebb [ 941.847650] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 813085a7f6334a369dbe56a86aa50869 [ 941.894261] env[62096]: DEBUG oslo_concurrency.lockutils [req-09e5a0ff-92f7-4294-86f8-2871dae068eb req-6f8dfd34-7d06-4495-937d-8089599e7c2a service nova] Releasing lock "refresh_cache-a960bdaa-ed46-4f5e-ad1a-56b06589c362" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 941.894679] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Acquired lock "refresh_cache-a960bdaa-ed46-4f5e-ad1a-56b06589c362" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 941.894867] env[62096]: DEBUG nova.network.neutron [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 941.895318] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg bca3b823459a41dfb46c66eb714114f9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 941.901832] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bca3b823459a41dfb46c66eb714114f9 [ 942.186895] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg 2847542d3b064b20bb227c0500c3095c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 942.226571] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2847542d3b064b20bb227c0500c3095c [ 942.339417] env[62096]: DEBUG oslo_concurrency.lockutils [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.695s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 942.340060] env[62096]: ERROR nova.compute.manager [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 77aaac28-df93-47dd-a289-96e93d2bc51d, please check neutron logs for more information. 
[ 942.340060] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] Traceback (most recent call last): [ 942.340060] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 942.340060] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] self.driver.spawn(context, instance, image_meta, [ 942.340060] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 942.340060] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 942.340060] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 942.340060] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] vm_ref = self.build_virtual_machine(instance, [ 942.340060] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 942.340060] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] vif_infos = vmwarevif.get_vif_info(self._session, [ 942.340060] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 942.340487] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] for vif in network_info: [ 942.340487] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 942.340487] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] return self._sync_wrapper(fn, *args, **kwargs) [ 942.340487] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 942.340487] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] self.wait() [ 942.340487] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 942.340487] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] self[:] = self._gt.wait() [ 942.340487] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 942.340487] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] return self._exit_event.wait() [ 942.340487] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 942.340487] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] current.throw(*self._exc) [ 942.340487] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
942.340487] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] result = function(*args, **kwargs) [ 942.340907] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 942.340907] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] return func(*args, **kwargs) [ 942.340907] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 942.340907] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] raise e [ 942.340907] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 942.340907] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] nwinfo = self.network_api.allocate_for_instance( [ 942.340907] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 942.340907] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] created_port_ids = self._update_ports_for_instance( [ 942.340907] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 942.340907] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] with excutils.save_and_reraise_exception(): [ 942.340907] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 942.340907] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] self.force_reraise() [ 942.340907] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 942.341252] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] raise self.value [ 942.341252] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 942.341252] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] updated_port = self._update_port( [ 942.341252] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 942.341252] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] _ensure_no_port_binding_failure(port) [ 942.341252] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 942.341252] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] raise exception.PortBindingFailed(port_id=port['id']) [ 942.341252] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] nova.exception.PortBindingFailed: Binding failed for 
port 77aaac28-df93-47dd-a289-96e93d2bc51d, please check neutron logs for more information. [ 942.341252] env[62096]: ERROR nova.compute.manager [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] [ 942.341252] env[62096]: DEBUG nova.compute.utils [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] Binding failed for port 77aaac28-df93-47dd-a289-96e93d2bc51d, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 942.342127] env[62096]: DEBUG oslo_concurrency.lockutils [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 2.979s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.344281] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 5ee17963230b449d8b6d9f55f2f40a43 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 942.345027] env[62096]: DEBUG nova.network.neutron [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.345653] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg e237ededbcd14241a1a638208e7df689 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 942.346476] env[62096]: DEBUG nova.compute.manager [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] Build of instance 2d42e5f7-b293-4133-b279-63a8320bc19d was re-scheduled: Binding failed for port 77aaac28-df93-47dd-a289-96e93d2bc51d, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 942.346924] env[62096]: DEBUG nova.compute.manager [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 942.347153] env[62096]: DEBUG oslo_concurrency.lockutils [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Acquiring lock "refresh_cache-2d42e5f7-b293-4133-b279-63a8320bc19d" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 942.347304] env[62096]: DEBUG oslo_concurrency.lockutils [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Acquired lock "refresh_cache-2d42e5f7-b293-4133-b279-63a8320bc19d" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.347465] env[62096]: DEBUG nova.network.neutron [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 942.347825] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 520b414db41e4e46af9ef0b65de58ba7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 942.361693] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e237ededbcd14241a1a638208e7df689 [ 942.364795] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 520b414db41e4e46af9ef0b65de58ba7 [ 942.383797] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ee17963230b449d8b6d9f55f2f40a43 [ 942.418083] env[62096]: DEBUG nova.network.neutron [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 942.496243] env[62096]: DEBUG nova.network.neutron [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.496773] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 6407eb3201fd49b0b9e19cda579037be in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 942.504557] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6407eb3201fd49b0b9e19cda579037be [ 942.716123] env[62096]: DEBUG nova.virt.hardware [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 942.716448] env[62096]: DEBUG nova.virt.hardware [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 942.716526] env[62096]: DEBUG nova.virt.hardware [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 942.716676] env[62096]: DEBUG nova.virt.hardware [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 942.716818] env[62096]: DEBUG nova.virt.hardware [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 942.716959] env[62096]: DEBUG nova.virt.hardware [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 942.717161] env[62096]: DEBUG 
nova.virt.hardware [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 942.717319] env[62096]: DEBUG nova.virt.hardware [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 942.717479] env[62096]: DEBUG nova.virt.hardware [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 942.717636] env[62096]: DEBUG nova.virt.hardware [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 942.717801] env[62096]: DEBUG nova.virt.hardware [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 942.718648] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61f6237a-df33-4c6e-bf8e-0cc1bd9ff2de {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.726773] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d1559f1-aabe-45fa-b39b-3a4713c830d7 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.739644] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Instance VIF info [] {{(pid=62096) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 942.745153] env[62096]: DEBUG oslo.service.loopingcall [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 942.745393] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Creating VM on the ESX host {{(pid=62096) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 942.745589] env[62096]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-32d5c035-d119-48f7-ae36-1dcd94ef9154 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.761864] env[62096]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 942.761864] env[62096]: value = "task-397477" [ 942.761864] env[62096]: _type = "Task" [ 942.761864] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.769377] env[62096]: DEBUG oslo_vmware.api [-] Task: {'id': task-397477, 'name': CreateVM_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.790010] env[62096]: DEBUG nova.compute.manager [req-ee6a1629-d0fe-4fe7-8e27-ec1da4c0a5f6 req-901303ac-8cd7-4c34-8466-8040c1657c1c service nova] [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] Received event network-vif-deleted-6111dbe7-582d-42af-9aeb-ca86af1a7404 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 942.855641] env[62096]: INFO nova.compute.manager [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 0f275603-acfc-43db-8a71-a17af8e837b4] Took 1.04 seconds to deallocate network for instance. [ 942.857246] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 1cc0e93226c1468984858be0fd8c0e85 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 942.874244] env[62096]: DEBUG nova.network.neutron [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 942.897819] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1cc0e93226c1468984858be0fd8c0e85 [ 942.947495] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d56a652-c330-4e69-ae4e-08bc24a6fe87 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.960263] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbd41aed-40d4-40d7-9a03-201a6a0a9510 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.988243] env[62096]: DEBUG nova.network.neutron [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.988738] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg dea98070943640ac8ce78bf6998b2189 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 942.990133] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ce85d3d-008b-46d7-bfb8-56d40b4f45f9 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.998396] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dea98070943640ac8ce78bf6998b2189 [ 943.001997] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fb3c758-ddb5-4d98-9990-89614630f9d1 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.006257] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Releasing lock "refresh_cache-a960bdaa-ed46-4f5e-ad1a-56b06589c362" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 943.006647] env[62096]: DEBUG nova.compute.manager [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 943.007099] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 943.007397] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-83cb399d-cd6a-4055-b289-28ec40ab35e7 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.018926] env[62096]: DEBUG nova.compute.provider_tree [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 943.019407] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 6c160adddec64c228acda85495d6129b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 943.023737] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd13ddfe-c4ec-4fec-9067-300fed0c0faf {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.033661] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6c160adddec64c228acda85495d6129b [ 943.045424] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a960bdaa-ed46-4f5e-ad1a-56b06589c362 could not be found. [ 943.045629] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 943.045804] env[62096]: INFO nova.compute.manager [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] Took 0.04 seconds to destroy the instance on the hypervisor. [ 943.046061] env[62096]: DEBUG oslo.service.loopingcall [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 943.046291] env[62096]: DEBUG nova.compute.manager [-] [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 943.046386] env[62096]: DEBUG nova.network.neutron [-] [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 943.061771] env[62096]: DEBUG nova.network.neutron [-] [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 943.062292] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 54d29845e3514fb2a9137247e5cbf6ee in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 943.069165] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 54d29845e3514fb2a9137247e5cbf6ee [ 943.271819] env[62096]: DEBUG oslo_vmware.api [-] Task: {'id': task-397477, 'name': CreateVM_Task, 'duration_secs': 0.25021} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.272343] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Created VM on the ESX host {{(pid=62096) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 943.273243] env[62096]: DEBUG oslo_vmware.service [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5938985-73f5-4bd9-bd98-28d00c8636c6 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.278573] env[62096]: DEBUG oslo_concurrency.lockutils [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 943.278733] env[62096]: DEBUG oslo_concurrency.lockutils [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Acquired lock "[datastore1] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.279114] env[62096]: DEBUG oslo_concurrency.lockutils [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 943.279344] env[62096]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db1ea653-1511-4251-88cd-760dfab7c0a1 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.285123] env[62096]: DEBUG oslo_vmware.api [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 
tempest-ServerShowV247Test-791895041-project-member] Waiting for the task: (returnval){ [ 943.285123] env[62096]: value = "session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]52b2be7b-5455-ff61-4139-3827d7763ba6" [ 943.285123] env[62096]: _type = "Task" [ 943.285123] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.291724] env[62096]: DEBUG oslo_vmware.api [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]52b2be7b-5455-ff61-4139-3827d7763ba6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.364382] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 5ca8c827c7214987b34b7f6d343b11c0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 943.396740] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ca8c827c7214987b34b7f6d343b11c0 [ 943.494779] env[62096]: DEBUG oslo_concurrency.lockutils [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Releasing lock "refresh_cache-2d42e5f7-b293-4133-b279-63a8320bc19d" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 943.494955] env[62096]: DEBUG nova.compute.manager [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 943.495345] env[62096]: DEBUG nova.compute.manager [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 943.495538] env[62096]: DEBUG nova.network.neutron [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 943.509813] env[62096]: DEBUG nova.network.neutron [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 943.510437] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 91b41969888e4e40b6f0b57ef34667e5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 943.516875] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 91b41969888e4e40b6f0b57ef34667e5 [ 943.522366] env[62096]: DEBUG nova.scheduler.client.report [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 943.524671] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 06a68637f8f54971adff15856c83e4d0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 943.535723] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 06a68637f8f54971adff15856c83e4d0 [ 943.563598] env[62096]: DEBUG nova.network.neutron [-] [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.563958] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg b320d6c9e1534e27b9b333c2329e7144 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 943.570552] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b320d6c9e1534e27b9b333c2329e7144 [ 943.795620] env[62096]: DEBUG oslo_concurrency.lockutils [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Releasing lock "[datastore1] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 943.796030] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Processing image fb764baa-3805-45c4-a694-aa91b0932110 {{(pid=62096) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 943.796233] env[62096]: DEBUG oslo_concurrency.lockutils [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 943.796428] env[62096]: DEBUG 
oslo_concurrency.lockutils [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Acquired lock "[datastore1] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.796656] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62096) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 943.796921] env[62096]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d5ac05d5-0e4d-4461-af64-5505981755e1 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.814317] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62096) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 943.814488] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62096) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 943.815237] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d68e5723-5f84-4a62-9fc7-0689b2c9ff3a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.821015] env[62096]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f470377-081f-4c6a-b834-e224dad98045 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.826169] env[62096]: DEBUG oslo_vmware.api [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Waiting for the task: (returnval){ [ 943.826169] env[62096]: value = "session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]5221496c-9ebe-9b2d-7cae-2f37cfeda240" [ 943.826169] env[62096]: _type = "Task" [ 943.826169] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.834612] env[62096]: DEBUG oslo_vmware.api [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]5221496c-9ebe-9b2d-7cae-2f37cfeda240, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.902377] env[62096]: INFO nova.scheduler.client.report [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Deleted allocations for instance 0f275603-acfc-43db-8a71-a17af8e837b4 [ 943.908368] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 3facfc52d58d466683aab2abf1f680af in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 943.922780] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3facfc52d58d466683aab2abf1f680af [ 944.012838] env[62096]: DEBUG nova.network.neutron [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 944.013363] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 43e9f9c4a30b480aa264cc43593ac892 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 944.022218] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 43e9f9c4a30b480aa264cc43593ac892 [ 944.026542] env[62096]: DEBUG oslo_concurrency.lockutils [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.684s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.027149] env[62096]: ERROR nova.compute.manager [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 1d4e854d-9749-4e8b-9a8a-eb9a812ee623, please check neutron logs for more information. 
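The lock traffic in the entries above ("Acquiring lock ...", "Acquired lock ...", '"released" ... held 1.684s') comes from oslo.concurrency's named locks. A minimal sketch of that pattern, assuming nothing beyond the lockutils calls themselves: the lock names and the work done under them are placeholders taken from this log, and the acquire/release/held messages are emitted by lockutils' own debug logging, not by this code.

from oslo_concurrency import lockutils

@lockutils.synchronized("compute_resources")
def abort_instance_claim_sketch():
    # Runs with the in-process "compute_resources" lock held; lockutils
    # logs the acquire, the release, and how long the lock was held.
    pass

def refresh_cache_sketch(instance_uuid):
    # The refresh_cache-<uuid> locks in the log follow the same pattern,
    # using the context-manager form with a per-instance lock name.
    with lockutils.lock("refresh_cache-%s" % instance_uuid):
        pass  # rebuild the instance's network info cache here
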
[ 944.027149] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] Traceback (most recent call last): [ 944.027149] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 944.027149] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] self.driver.spawn(context, instance, image_meta, [ 944.027149] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 944.027149] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] self._vmops.spawn(context, instance, image_meta, injected_files, [ 944.027149] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 944.027149] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] vm_ref = self.build_virtual_machine(instance, [ 944.027149] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 944.027149] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] vif_infos = vmwarevif.get_vif_info(self._session, [ 944.027149] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 944.027514] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] for vif in network_info: [ 944.027514] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 944.027514] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] return self._sync_wrapper(fn, *args, **kwargs) [ 944.027514] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 944.027514] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] self.wait() [ 944.027514] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 944.027514] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] self[:] = self._gt.wait() [ 944.027514] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 944.027514] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] return self._exit_event.wait() [ 944.027514] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 944.027514] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] current.throw(*self._exc) [ 944.027514] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
944.027514] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] result = function(*args, **kwargs) [ 944.027949] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 944.027949] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] return func(*args, **kwargs) [ 944.027949] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 944.027949] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] raise e [ 944.027949] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 944.027949] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] nwinfo = self.network_api.allocate_for_instance( [ 944.027949] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 944.027949] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] created_port_ids = self._update_ports_for_instance( [ 944.027949] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 944.027949] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] with excutils.save_and_reraise_exception(): [ 944.027949] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 944.027949] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] self.force_reraise() [ 944.027949] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 944.028372] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] raise self.value [ 944.028372] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 944.028372] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] updated_port = self._update_port( [ 944.028372] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 944.028372] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] _ensure_no_port_binding_failure(port) [ 944.028372] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 944.028372] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] raise exception.PortBindingFailed(port_id=port['id']) [ 944.028372] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] nova.exception.PortBindingFailed: Binding failed for 
port 1d4e854d-9749-4e8b-9a8a-eb9a812ee623, please check neutron logs for more information. [ 944.028372] env[62096]: ERROR nova.compute.manager [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] [ 944.028372] env[62096]: DEBUG nova.compute.utils [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] Binding failed for port 1d4e854d-9749-4e8b-9a8a-eb9a812ee623, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 944.029339] env[62096]: DEBUG nova.compute.manager [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] Build of instance 6b8a00ab-b6be-46ad-989d-81692d1d7556 was re-scheduled: Binding failed for port 1d4e854d-9749-4e8b-9a8a-eb9a812ee623, please check neutron logs for more information. {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 944.029747] env[62096]: DEBUG nova.compute.manager [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 944.029968] env[62096]: DEBUG oslo_concurrency.lockutils [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Acquiring lock "refresh_cache-6b8a00ab-b6be-46ad-989d-81692d1d7556" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 944.030235] env[62096]: DEBUG oslo_concurrency.lockutils [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Acquired lock "refresh_cache-6b8a00ab-b6be-46ad-989d-81692d1d7556" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 944.030311] env[62096]: DEBUG nova.network.neutron [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 944.031073] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg fd053fdf2f5d43f39c079dffe3924548 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 944.036901] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fd053fdf2f5d43f39c079dffe3924548 [ 944.065770] env[62096]: INFO nova.compute.manager [-] [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] Took 1.02 seconds to deallocate network for instance. 
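The traceback above ends in _ensure_no_port_binding_failure raising PortBindingFailed, after which the compute manager marks the build as re-scheduled, unplugs VIFs, deallocates networking and releases the resource claim, as the surrounding entries record. A simplified sketch of that check, using stand-in names rather than Nova's actual helpers:

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs "
            "for more information." % port_id)

def ensure_no_port_binding_failure(port):
    # Neutron marks a port it could not bind by setting binding:vif_type to
    # "binding_failed"; treating that as a hard error is what turns a quiet
    # Neutron-side failure into the PortBindingFailed seen in this log.
    if port.get("binding:vif_type") == "binding_failed":
        raise PortBindingFailed(port_id=port["id"])
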
[ 944.067942] env[62096]: DEBUG nova.compute.claims [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 944.068159] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.068381] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.070148] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg a78201a5ee55426588cd83ddd5977f32 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 944.100591] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a78201a5ee55426588cd83ddd5977f32 [ 944.336804] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Preparing fetch location {{(pid=62096) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 944.337065] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Creating directory with path [datastore1] vmware_temp/3c6c97e2-2d41-4230-ad57-bf4b07c65094/fb764baa-3805-45c4-a694-aa91b0932110 {{(pid=62096) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 944.337303] env[62096]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3df6e254-4efb-474e-b560-1d0015877630 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.359737] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Created directory with path [datastore1] vmware_temp/3c6c97e2-2d41-4230-ad57-bf4b07c65094/fb764baa-3805-45c4-a694-aa91b0932110 {{(pid=62096) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 944.359907] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Fetch image to [datastore1] vmware_temp/3c6c97e2-2d41-4230-ad57-bf4b07c65094/fb764baa-3805-45c4-a694-aa91b0932110/tmp-sparse.vmdk {{(pid=62096) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 944.360083] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f 
tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Downloading image file data fb764baa-3805-45c4-a694-aa91b0932110 to [datastore1] vmware_temp/3c6c97e2-2d41-4230-ad57-bf4b07c65094/fb764baa-3805-45c4-a694-aa91b0932110/tmp-sparse.vmdk on the data store datastore1 {{(pid=62096) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 944.360859] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-730b945e-905d-4a74-951f-ade4d6eb4067 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.368551] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91280f12-ca01-4565-89e3-e34641e04e7b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.377822] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4e9e334-dad9-4c22-99eb-13a297b4dd50 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.410035] env[62096]: DEBUG oslo_concurrency.lockutils [None req-0fdf712d-9ab7-47de-a5a9-f02790031e75 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Lock "0f275603-acfc-43db-8a71-a17af8e837b4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.323s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.411090] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87c67bea-4f59-49af-a5ff-69ce4ad0dceb {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.417571] env[62096]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4a72835c-5e78-4d16-a7af-7b0d412a819f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.440411] env[62096]: DEBUG nova.virt.vmwareapi.images [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Downloading image file data fb764baa-3805-45c4-a694-aa91b0932110 to the data store datastore1 {{(pid=62096) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 944.498311] env[62096]: DEBUG oslo_vmware.rw_handles [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3c6c97e2-2d41-4230-ad57-bf4b07c65094/fb764baa-3805-45c4-a694-aa91b0932110/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=62096) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 944.580066] env[62096]: INFO nova.compute.manager [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 2d42e5f7-b293-4133-b279-63a8320bc19d] Took 1.08 seconds to deallocate network for instance. [ 944.583058] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 4820762333d54bddadda25cde96775dc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 944.610143] env[62096]: DEBUG nova.network.neutron [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 944.630875] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4820762333d54bddadda25cde96775dc [ 944.686855] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4da375e5-58ed-4d5c-9450-ebe4d22cde12 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.695630] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-725fd01b-c441-4b88-afbf-0c026773511c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.730629] env[62096]: DEBUG nova.network.neutron [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 944.731145] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 3db3de93a04b40f7a078de222875360a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 944.733416] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57863a26-42e3-468b-910f-7e3c14288057 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.742449] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3db3de93a04b40f7a078de222875360a [ 944.746833] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d08127c-03d2-40aa-8c84-e776a1a0835d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.764202] env[62096]: DEBUG nova.compute.provider_tree [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 944.764718] env[62096]: INFO 
oslo_messaging._drivers.amqpdriver [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg d569e02d8a454a7eb9b43735135c37a8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 944.776038] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d569e02d8a454a7eb9b43735135c37a8 [ 945.096216] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 22fc2b0e73d6441f94f0524e1207cf86 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 945.132031] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 22fc2b0e73d6441f94f0524e1207cf86 [ 945.238310] env[62096]: DEBUG oslo_concurrency.lockutils [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Releasing lock "refresh_cache-6b8a00ab-b6be-46ad-989d-81692d1d7556" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 945.238527] env[62096]: DEBUG nova.compute.manager [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 945.238702] env[62096]: DEBUG nova.compute.manager [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 945.238924] env[62096]: DEBUG nova.network.neutron [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 945.255705] env[62096]: DEBUG nova.network.neutron [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 945.256360] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 211456c8b543413fabd22d88ab8fd589 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 945.264647] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 211456c8b543413fabd22d88ab8fd589 [ 945.272797] env[62096]: DEBUG nova.scheduler.client.report [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 945.276684] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 30d99a0760ce43e78562c785161e037c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 945.291562] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 30d99a0760ce43e78562c785161e037c [ 945.353794] env[62096]: DEBUG oslo_vmware.rw_handles [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Completed reading data from the image iterator. {{(pid=62096) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 945.354050] env[62096]: DEBUG oslo_vmware.rw_handles [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3c6c97e2-2d41-4230-ad57-bf4b07c65094/fb764baa-3805-45c4-a694-aa91b0932110/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=62096) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 945.495819] env[62096]: DEBUG nova.virt.vmwareapi.images [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Downloaded image file data fb764baa-3805-45c4-a694-aa91b0932110 to vmware_temp/3c6c97e2-2d41-4230-ad57-bf4b07c65094/fb764baa-3805-45c4-a694-aa91b0932110/tmp-sparse.vmdk on the data store datastore1 {{(pid=62096) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 945.498627] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Caching image {{(pid=62096) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 945.498881] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Copying Virtual Disk [datastore1] vmware_temp/3c6c97e2-2d41-4230-ad57-bf4b07c65094/fb764baa-3805-45c4-a694-aa91b0932110/tmp-sparse.vmdk to [datastore1] vmware_temp/3c6c97e2-2d41-4230-ad57-bf4b07c65094/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk {{(pid=62096) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 945.499176] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e5bdd687-25cd-455c-81f0-ca613b720372 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.506537] env[62096]: DEBUG oslo_vmware.api [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Waiting for the task: (returnval){ [ 945.506537] env[62096]: value = "task-397478" [ 945.506537] env[62096]: _type = "Task" [ 945.506537] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.515675] env[62096]: DEBUG oslo_vmware.api [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397478, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.617580] env[62096]: INFO nova.scheduler.client.report [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Deleted allocations for instance 2d42e5f7-b293-4133-b279-63a8320bc19d [ 945.624054] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg d101b83646454d80bed47dd40b6f6ee8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 945.637315] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d101b83646454d80bed47dd40b6f6ee8 [ 945.759175] env[62096]: DEBUG nova.network.neutron [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.759747] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 62f30c236e524b999f76e0cd2e2c97bc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 945.768145] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 62f30c236e524b999f76e0cd2e2c97bc [ 945.779714] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.711s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 945.780601] env[62096]: ERROR nova.compute.manager [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 6111dbe7-582d-42af-9aeb-ca86af1a7404, please check neutron logs for more information. 
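The image-cache activity above (MakeDirectory under vmware_temp, streaming the sparse VMDK through the HTTPS write handle, then CopyVirtualDisk_Task into the cached fb764baa-... path) is driven by vCenter tasks that the driver polls until they finish, which is what the repeated "progress is 0%" entries reflect. A schematic poll loop, not oslo.vmware's implementation, with get_task_info standing in for whatever fetches the task state:

import time

def wait_for_task_sketch(get_task_info, task_ref, poll_interval=0.5):
    # Poll a vCenter-style task until it reaches a terminal state.
    # oslo.vmware wraps this idea in VMwareAPISession.wait_for_task; the task
    # lookup is left abstract here so the sketch stays self-contained.
    while True:
        info = get_task_info(task_ref)  # expected keys: state, progress, error
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            raise RuntimeError("Task %s failed: %s" % (task_ref, info.get("error")))
        time.sleep(poll_interval)  # 'queued' / 'running': wait and retry
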
[ 945.780601] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] Traceback (most recent call last): [ 945.780601] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 945.780601] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] self.driver.spawn(context, instance, image_meta, [ 945.780601] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 945.780601] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] self._vmops.spawn(context, instance, image_meta, injected_files, [ 945.780601] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 945.780601] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] vm_ref = self.build_virtual_machine(instance, [ 945.780601] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 945.780601] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] vif_infos = vmwarevif.get_vif_info(self._session, [ 945.780601] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 945.781038] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] for vif in network_info: [ 945.781038] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 945.781038] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] return self._sync_wrapper(fn, *args, **kwargs) [ 945.781038] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 945.781038] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] self.wait() [ 945.781038] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 945.781038] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] self[:] = self._gt.wait() [ 945.781038] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 945.781038] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] return self._exit_event.wait() [ 945.781038] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 945.781038] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] current.throw(*self._exc) [ 945.781038] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
945.781038] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] result = function(*args, **kwargs) [ 945.781459] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 945.781459] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] return func(*args, **kwargs) [ 945.781459] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 945.781459] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] raise e [ 945.781459] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 945.781459] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] nwinfo = self.network_api.allocate_for_instance( [ 945.781459] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 945.781459] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] created_port_ids = self._update_ports_for_instance( [ 945.781459] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 945.781459] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] with excutils.save_and_reraise_exception(): [ 945.781459] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 945.781459] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] self.force_reraise() [ 945.781459] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 945.781890] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] raise self.value [ 945.781890] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 945.781890] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] updated_port = self._update_port( [ 945.781890] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 945.781890] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] _ensure_no_port_binding_failure(port) [ 945.781890] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 945.781890] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] raise exception.PortBindingFailed(port_id=port['id']) [ 945.781890] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] nova.exception.PortBindingFailed: Binding failed for 
port 6111dbe7-582d-42af-9aeb-ca86af1a7404, please check neutron logs for more information. [ 945.781890] env[62096]: ERROR nova.compute.manager [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] [ 945.781890] env[62096]: DEBUG nova.compute.utils [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] Binding failed for port 6111dbe7-582d-42af-9aeb-ca86af1a7404, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 945.783532] env[62096]: DEBUG nova.compute.manager [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] Build of instance a960bdaa-ed46-4f5e-ad1a-56b06589c362 was re-scheduled: Binding failed for port 6111dbe7-582d-42af-9aeb-ca86af1a7404, please check neutron logs for more information. {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 945.784142] env[62096]: DEBUG nova.compute.manager [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 945.784461] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Acquiring lock "refresh_cache-a960bdaa-ed46-4f5e-ad1a-56b06589c362" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 945.784665] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Acquired lock "refresh_cache-a960bdaa-ed46-4f5e-ad1a-56b06589c362" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.784884] env[62096]: DEBUG nova.network.neutron [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 945.785426] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 01a1110893d94b418db63d1d7c3a0448 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 945.794086] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 01a1110893d94b418db63d1d7c3a0448 [ 945.993904] env[62096]: DEBUG oslo_concurrency.lockutils [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Acquiring lock "9968505a-c4cf-4ed5-9bec-0853d1d2840c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 945.994180] env[62096]: DEBUG oslo_concurrency.lockutils [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a 
tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Lock "9968505a-c4cf-4ed5-9bec-0853d1d2840c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 945.994672] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg bfffd0ad9c204dbe9857295579e4319e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 946.006471] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bfffd0ad9c204dbe9857295579e4319e [ 946.016286] env[62096]: DEBUG oslo_vmware.api [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397478, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.125368] env[62096]: DEBUG oslo_concurrency.lockutils [None req-66abb748-9105-4dd1-b487-dfa24b044df5 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Lock "2d42e5f7-b293-4133-b279-63a8320bc19d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.446s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 946.262527] env[62096]: INFO nova.compute.manager [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 6b8a00ab-b6be-46ad-989d-81692d1d7556] Took 1.02 seconds to deallocate network for instance. [ 946.264287] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg c218f9a95a684c2992f5875a6c8dfa3e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 946.297497] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c218f9a95a684c2992f5875a6c8dfa3e [ 946.302498] env[62096]: DEBUG nova.network.neutron [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 946.387568] env[62096]: DEBUG nova.network.neutron [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.388116] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 875fb37c93e64152b212ce1adb6ad11f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 946.396709] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 875fb37c93e64152b212ce1adb6ad11f [ 946.497326] env[62096]: DEBUG nova.compute.manager [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 946.498983] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg a57796eb4aec45ec85074f34d7bfe683 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 946.518772] env[62096]: DEBUG oslo_vmware.api [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397478, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.707779} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.519014] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Copied Virtual Disk [datastore1] vmware_temp/3c6c97e2-2d41-4230-ad57-bf4b07c65094/fb764baa-3805-45c4-a694-aa91b0932110/tmp-sparse.vmdk to [datastore1] vmware_temp/3c6c97e2-2d41-4230-ad57-bf4b07c65094/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk {{(pid=62096) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 946.519184] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Deleting the datastore file [datastore1] vmware_temp/3c6c97e2-2d41-4230-ad57-bf4b07c65094/fb764baa-3805-45c4-a694-aa91b0932110/tmp-sparse.vmdk {{(pid=62096) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 946.519425] env[62096]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ad59fda5-0c90-4b43-89fc-d8112be7ae08 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.526516] env[62096]: DEBUG oslo_vmware.api [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Waiting for the task: (returnval){ [ 946.526516] env[62096]: value = "task-397479" [ 946.526516] env[62096]: _type = "Task" [ 946.526516] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.532214] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a57796eb4aec45ec85074f34d7bfe683 [ 946.536184] env[62096]: DEBUG oslo_vmware.api [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397479, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.769606] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 63d5bad2300d4b779bdc93362aada29f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 946.802156] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 63d5bad2300d4b779bdc93362aada29f [ 946.892037] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Releasing lock "refresh_cache-a960bdaa-ed46-4f5e-ad1a-56b06589c362" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 946.892037] env[62096]: DEBUG nova.compute.manager [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 946.892037] env[62096]: DEBUG nova.compute.manager [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 946.892037] env[62096]: DEBUG nova.network.neutron [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 946.906408] env[62096]: DEBUG nova.network.neutron [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 946.907009] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg bd193180042e4b14b9d73a74b2d9113a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 946.913384] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bd193180042e4b14b9d73a74b2d9113a [ 947.018328] env[62096]: DEBUG oslo_concurrency.lockutils [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 947.018593] env[62096]: DEBUG oslo_concurrency.lockutils [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.020155] env[62096]: INFO nova.compute.claims [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 947.021827] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 5133325793c349f58fe2b4af3c9bf9d9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 947.036395] env[62096]: DEBUG oslo_vmware.api [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397479, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.022732} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.036676] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Deleted the datastore file {{(pid=62096) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 947.036917] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Moving file from [datastore1] vmware_temp/3c6c97e2-2d41-4230-ad57-bf4b07c65094/fb764baa-3805-45c4-a694-aa91b0932110 to [datastore1] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110. {{(pid=62096) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 947.037188] env[62096]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-6a9a3b8f-c019-4173-a2e4-4859c6f9be11 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.043539] env[62096]: DEBUG oslo_vmware.api [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Waiting for the task: (returnval){ [ 947.043539] env[62096]: value = "task-397480" [ 947.043539] env[62096]: _type = "Task" [ 947.043539] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.051890] env[62096]: DEBUG oslo_vmware.api [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397480, 'name': MoveDatastoreFile_Task} progress is 0%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.061006] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5133325793c349f58fe2b4af3c9bf9d9 [ 947.289277] env[62096]: INFO nova.scheduler.client.report [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Deleted allocations for instance 6b8a00ab-b6be-46ad-989d-81692d1d7556 [ 947.295669] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 46b57bda042e43629e9a3583a88fd3d5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 947.311192] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 46b57bda042e43629e9a3583a88fd3d5 [ 947.344216] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Acquiring lock "db3c7e13-12d9-466d-9823-a0a107949588" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 947.344453] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Lock "db3c7e13-12d9-466d-9823-a0a107949588" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.344902] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 44cba6e9f50a4281ad1bcf20f4c9d4a7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 947.357690] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 44cba6e9f50a4281ad1bcf20f4c9d4a7 [ 947.408995] env[62096]: DEBUG nova.network.neutron [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.409493] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 82f04218b6344672b6d108b9402f9cb3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 947.417801] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 82f04218b6344672b6d108b9402f9cb3 [ 947.526446] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg c8fa96aef3e94a13a449329330733ef1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 947.533746] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
c8fa96aef3e94a13a449329330733ef1 [ 947.554163] env[62096]: DEBUG oslo_vmware.api [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397480, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.031736} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.554428] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] File moved {{(pid=62096) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 947.554724] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Cleaning up location [datastore1] vmware_temp/3c6c97e2-2d41-4230-ad57-bf4b07c65094 {{(pid=62096) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 947.554897] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Deleting the datastore file [datastore1] vmware_temp/3c6c97e2-2d41-4230-ad57-bf4b07c65094 {{(pid=62096) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 947.555166] env[62096]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f61c68a6-eaaa-4887-b0a0-77eb41c349bf {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.561683] env[62096]: DEBUG oslo_vmware.api [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Waiting for the task: (returnval){ [ 947.561683] env[62096]: value = "task-397481" [ 947.561683] env[62096]: _type = "Task" [ 947.561683] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.569409] env[62096]: DEBUG oslo_vmware.api [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397481, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.797228] env[62096]: DEBUG oslo_concurrency.lockutils [None req-abde4e35-3d25-4bdf-ac75-6b77642d395c tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Lock "6b8a00ab-b6be-46ad-989d-81692d1d7556" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.400s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.847132] env[62096]: DEBUG nova.compute.manager [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: db3c7e13-12d9-466d-9823-a0a107949588] Starting instance... 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 947.848830] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg dca52c10dcab4f91985aca96116d6fd3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 947.877605] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dca52c10dcab4f91985aca96116d6fd3 [ 947.912028] env[62096]: INFO nova.compute.manager [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] [instance: a960bdaa-ed46-4f5e-ad1a-56b06589c362] Took 1.02 seconds to deallocate network for instance. [ 947.913628] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 250c847bd0b147c082e35b3f19291336 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 947.943796] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 250c847bd0b147c082e35b3f19291336 [ 948.071393] env[62096]: DEBUG oslo_vmware.api [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397481, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.026863} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.071654] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Deleted the datastore file {{(pid=62096) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 948.072394] env[62096]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c751a9ee-a2b2-4cd0-9750-5436a07b7d67 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.079232] env[62096]: DEBUG oslo_vmware.api [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Waiting for the task: (returnval){ [ 948.079232] env[62096]: value = "session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]52ba9be9-cd7c-281f-7a15-34650ae04353" [ 948.079232] env[62096]: _type = "Task" [ 948.079232] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.086452] env[62096]: DEBUG oslo_vmware.api [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]52ba9be9-cd7c-281f-7a15-34650ae04353, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.098431] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-603ca139-ac45-4d14-b7ab-b999a12274e2 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.105032] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c30a0add-8814-4742-9545-88d18f894979 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.136096] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7661c871-dddd-4d40-9f71-6e58cb1c2a0e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.143718] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ed99f34-e0af-4d54-abe2-456b746a03b9 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.159901] env[62096]: DEBUG nova.compute.provider_tree [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 948.160531] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg f571e2e67a14477c81b62ce482a7bb06 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 948.174597] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f571e2e67a14477c81b62ce482a7bb06 [ 948.364817] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 948.418158] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg 3185a0fd764945868e18c8f07e5a7c4a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 948.447959] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3185a0fd764945868e18c8f07e5a7c4a [ 948.591616] env[62096]: DEBUG oslo_vmware.api [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': session[5264b2d0-7e3b-3d3c-bfd1-bd630cad6eae]52ba9be9-cd7c-281f-7a15-34650ae04353, 'name': SearchDatastore_Task, 'duration_secs': 0.008018} completed 
successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.591855] env[62096]: DEBUG oslo_concurrency.lockutils [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Releasing lock "[datastore1] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 948.592156] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk to [datastore1] 9f3ce570-878f-48bf-a08c-0387b0556785/9f3ce570-878f-48bf-a08c-0387b0556785.vmdk {{(pid=62096) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 948.592412] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-99521087-b954-4419-b49d-130797025c88 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.598438] env[62096]: DEBUG oslo_vmware.api [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Waiting for the task: (returnval){ [ 948.598438] env[62096]: value = "task-397482" [ 948.598438] env[62096]: _type = "Task" [ 948.598438] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.605951] env[62096]: DEBUG oslo_vmware.api [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397482, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.679450] env[62096]: ERROR nova.scheduler.client.report [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [req-e5a60201-d505-483e-8838-b430e765bb89] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 6eefe13c-ab55-4c03-987f-47a62756c3b3. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-e5a60201-d505-483e-8838-b430e765bb89"}]} [ 948.695508] env[62096]: DEBUG nova.scheduler.client.report [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Refreshing inventories for resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 948.709917] env[62096]: DEBUG nova.scheduler.client.report [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Updating ProviderTree inventory for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 948.710154] env[62096]: DEBUG nova.compute.provider_tree [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 948.721093] env[62096]: DEBUG nova.scheduler.client.report [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Refreshing aggregate associations for resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3, aggregates: None {{(pid=62096) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 948.741007] env[62096]: DEBUG nova.scheduler.client.report [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Refreshing trait associations for resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62096) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 948.858286] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-516ac3ee-bef8-435a-86ab-22405d114abb {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.867582] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f11589d-979a-42b2-8118-13d1190e82aa {{(pid=62096) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.899530] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a21de25-56d0-4344-a71d-32924b9abbc6 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.909117] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eee92f3d-0ae6-4a6d-ae5e-60db54c8494f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.928266] env[62096]: DEBUG nova.compute.provider_tree [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 948.928831] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 41817c150ff74635b6ca5305e0dd0440 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 948.938775] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 41817c150ff74635b6ca5305e0dd0440 [ 948.949349] env[62096]: INFO nova.scheduler.client.report [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Deleted allocations for instance a960bdaa-ed46-4f5e-ad1a-56b06589c362 [ 948.957195] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Expecting reply to msg d600ff83438447a6bc552b5ac78ab687 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 948.971708] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d600ff83438447a6bc552b5ac78ab687 [ 949.108492] env[62096]: DEBUG oslo_vmware.api [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397482, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.437675} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.109041] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/fb764baa-3805-45c4-a694-aa91b0932110/fb764baa-3805-45c4-a694-aa91b0932110.vmdk to [datastore1] 9f3ce570-878f-48bf-a08c-0387b0556785/9f3ce570-878f-48bf-a08c-0387b0556785.vmdk {{(pid=62096) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 949.109446] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Extending root virtual disk to 1048576 {{(pid=62096) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 949.109834] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ffa9ea40-da8c-46a6-84b6-11a720ef89dc {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.116085] env[62096]: DEBUG oslo_vmware.api [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Waiting for the task: (returnval){ [ 949.116085] env[62096]: value = "task-397483" [ 949.116085] env[62096]: _type = "Task" [ 949.116085] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.124929] env[62096]: DEBUG oslo_vmware.api [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397483, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.377362] env[62096]: DEBUG oslo_concurrency.lockutils [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Acquiring lock "8a9e2396-2585-4fb8-9f92-76d4d1578a95" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 949.377647] env[62096]: DEBUG oslo_concurrency.lockutils [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Lock "8a9e2396-2585-4fb8-9f92-76d4d1578a95" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 949.378059] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 0817aa29c9b54acb8ad794bc3f09695a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 949.386195] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0817aa29c9b54acb8ad794bc3f09695a [ 949.452588] env[62096]: ERROR nova.scheduler.client.report [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [req-14ecb759-5f27-425d-982d-a766a4dea929] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 6eefe13c-ab55-4c03-987f-47a62756c3b3. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-14ecb759-5f27-425d-982d-a766a4dea929"}]} [ 949.459297] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee01a51e-dfc7-48ec-b8ca-f308df27a4d0 tempest-ImagesTestJSON-417897785 tempest-ImagesTestJSON-417897785-project-member] Lock "a960bdaa-ed46-4f5e-ad1a-56b06589c362" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.166s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 949.467539] env[62096]: DEBUG nova.scheduler.client.report [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Refreshing inventories for resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 949.479750] env[62096]: DEBUG nova.scheduler.client.report [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Updating ProviderTree inventory for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 949.479953] env[62096]: DEBUG nova.compute.provider_tree [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 949.489233] env[62096]: DEBUG nova.scheduler.client.report [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Refreshing aggregate associations for resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3, aggregates: None {{(pid=62096) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 949.505591] env[62096]: DEBUG nova.scheduler.client.report [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Refreshing trait associations for resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62096) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 949.571028] 
env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aac58714-745d-4e42-87e0-66c8c95f1980 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.578746] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caa84a38-84a4-4a28-ba82-c46040e0cff4 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.608645] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f01c15b-ec98-4365-abd7-8c8d858845f8 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.615556] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29c5e8c6-6781-431c-826e-6d0b00beb128 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.631982] env[62096]: DEBUG nova.compute.provider_tree [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 949.632486] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg fccc47dcc93e428f837869ebe75d934d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 949.635761] env[62096]: DEBUG oslo_vmware.api [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397483, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.058762} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.636222] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Extended root virtual disk {{(pid=62096) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 949.637206] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44b6136d-e67d-4664-a643-c08e9c90af34 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.639656] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fccc47dcc93e428f837869ebe75d934d [ 949.656488] env[62096]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] 9f3ce570-878f-48bf-a08c-0387b0556785/9f3ce570-878f-48bf-a08c-0387b0556785.vmdk or device None with type sparse {{(pid=62096) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 949.656981] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d9e2cc83-1074-4915-8b77-4ff764335a54 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.678034] env[62096]: DEBUG oslo_vmware.api [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Waiting for the task: (returnval){ [ 949.678034] env[62096]: value = "task-397484" [ 949.678034] env[62096]: _type = "Task" [ 949.678034] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.685179] env[62096]: DEBUG oslo_vmware.api [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397484, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.881078] env[62096]: DEBUG nova.compute.manager [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] Starting instance... 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 949.882121] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg d4bbce530aad464596357fc475874430 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 949.916566] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d4bbce530aad464596357fc475874430 [ 950.138060] env[62096]: DEBUG nova.scheduler.client.report [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 950.140443] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 6538fd319b8945db9ca15aa51f98453d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 950.151705] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6538fd319b8945db9ca15aa51f98453d [ 950.188629] env[62096]: DEBUG oslo_vmware.api [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397484, 'name': ReconfigVM_Task, 'duration_secs': 0.26782} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.189030] env[62096]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Reconfigured VM instance instance-00000055 to attach disk [datastore1] 9f3ce570-878f-48bf-a08c-0387b0556785/9f3ce570-878f-48bf-a08c-0387b0556785.vmdk or device None with type sparse {{(pid=62096) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 950.189742] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dfadd154-1e38-4d92-8d24-77b62ff27f7b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.197073] env[62096]: DEBUG oslo_vmware.api [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Waiting for the task: (returnval){ [ 950.197073] env[62096]: value = "task-397485" [ 950.197073] env[62096]: _type = "Task" [ 950.197073] env[62096]: } to complete. 
{{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.204671] env[62096]: DEBUG oslo_vmware.api [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397485, 'name': Rename_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.412660] env[62096]: DEBUG oslo_concurrency.lockutils [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 950.643726] env[62096]: DEBUG oslo_concurrency.lockutils [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.625s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 950.644255] env[62096]: DEBUG nova.compute.manager [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] Start building networks asynchronously for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 950.645905] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 273bf3a34aba4a9caff0f11c43407261 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 950.646937] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.282s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 950.654885] env[62096]: INFO nova.compute.claims [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: db3c7e13-12d9-466d-9823-a0a107949588] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 950.654885] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 9c15215e672740bea088affa76ec1d89 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 950.686213] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 273bf3a34aba4a9caff0f11c43407261 [ 950.692542] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9c15215e672740bea088affa76ec1d89 [ 950.707607] env[62096]: DEBUG oslo_vmware.api [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397485, 'name': Rename_Task, 'duration_secs': 0.122939} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.707607] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Powering on the VM {{(pid=62096) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 950.707607] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0d5ceedb-334a-4044-bbfd-2475bf667bd0 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.713602] env[62096]: DEBUG oslo_vmware.api [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Waiting for the task: (returnval){ [ 950.713602] env[62096]: value = "task-397486" [ 950.713602] env[62096]: _type = "Task" [ 950.713602] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.721053] env[62096]: DEBUG oslo_vmware.api [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397486, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.152635] env[62096]: DEBUG nova.compute.utils [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 951.153266] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg ec162f2cf9d14463b53fe0027b31828f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 951.155558] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 532c70ad4de5413a94391637e715d96d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 951.156197] env[62096]: DEBUG nova.compute.manager [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 951.156362] env[62096]: DEBUG nova.network.neutron [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 951.164220] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 532c70ad4de5413a94391637e715d96d [ 951.164613] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ec162f2cf9d14463b53fe0027b31828f [ 951.202413] env[62096]: DEBUG nova.policy [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '491a2069427f43e79347a70e475e4dd2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e45573130e8e4ce4979b37e1b4c5af9f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 951.224516] env[62096]: DEBUG oslo_vmware.api [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397486, 'name': PowerOnVM_Task, 'duration_secs': 0.419658} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.224765] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Powered on the VM {{(pid=62096) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 951.224966] env[62096]: DEBUG nova.compute.manager [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Checking state {{(pid=62096) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 951.226104] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-177a1c60-b86c-4562-9865-0fbae60ae1a4 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.233230] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg c61f958e579244a09e2fb8496b6724d5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 951.270432] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c61f958e579244a09e2fb8496b6724d5 [ 951.460315] env[62096]: DEBUG nova.network.neutron [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] Successfully created port: b59a00a8-d38c-41d2-8e43-14ab1a189145 {{(pid=62096) 
_create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 951.656724] env[62096]: DEBUG nova.compute.manager [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] Start building block device mappings for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 951.658606] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 032baa63dbf44171a764796e473fc57c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 951.698722] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 032baa63dbf44171a764796e473fc57c [ 951.759816] env[62096]: DEBUG oslo_concurrency.lockutils [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 951.763640] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3f6738f-3113-4e9e-8988-b45da67937b8 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.770298] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg 1bca0bd1104c4edcbca0b00f1737f424 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 951.776750] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ce83f0c-dba0-43f4-96c1-a81fa8a9b7c0 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.780634] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1bca0bd1104c4edcbca0b00f1737f424 [ 951.812351] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad895e81-c852-49b4-ab81-1364287e1969 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.819582] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f6f3c00-ec89-4b5e-8171-23aba8579d34 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.832914] env[62096]: DEBUG nova.compute.provider_tree [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 951.833257] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 90eb00a2311d4a99a94d0b8d2bb6a014 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 951.840562] env[62096]: INFO oslo_messaging._drivers.amqpdriver 
[-] Received RPC response for msg 90eb00a2311d4a99a94d0b8d2bb6a014 [ 952.144315] env[62096]: DEBUG nova.compute.manager [req-814e1d29-289d-4b84-9a0a-7c53c8129526 req-fb8d596e-5661-49f2-ae15-f0efc7d08063 service nova] [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] Received event network-changed-b59a00a8-d38c-41d2-8e43-14ab1a189145 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 952.144518] env[62096]: DEBUG nova.compute.manager [req-814e1d29-289d-4b84-9a0a-7c53c8129526 req-fb8d596e-5661-49f2-ae15-f0efc7d08063 service nova] [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] Refreshing instance network info cache due to event network-changed-b59a00a8-d38c-41d2-8e43-14ab1a189145. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 952.144725] env[62096]: DEBUG oslo_concurrency.lockutils [req-814e1d29-289d-4b84-9a0a-7c53c8129526 req-fb8d596e-5661-49f2-ae15-f0efc7d08063 service nova] Acquiring lock "refresh_cache-9968505a-c4cf-4ed5-9bec-0853d1d2840c" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 952.144859] env[62096]: DEBUG oslo_concurrency.lockutils [req-814e1d29-289d-4b84-9a0a-7c53c8129526 req-fb8d596e-5661-49f2-ae15-f0efc7d08063 service nova] Acquired lock "refresh_cache-9968505a-c4cf-4ed5-9bec-0853d1d2840c" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 952.145010] env[62096]: DEBUG nova.network.neutron [req-814e1d29-289d-4b84-9a0a-7c53c8129526 req-fb8d596e-5661-49f2-ae15-f0efc7d08063 service nova] [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] Refreshing network info cache for port b59a00a8-d38c-41d2-8e43-14ab1a189145 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 952.145433] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-814e1d29-289d-4b84-9a0a-7c53c8129526 req-fb8d596e-5661-49f2-ae15-f0efc7d08063 service nova] Expecting reply to msg 48cbea3caac44c94801a7c9bfa3108f6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 952.151768] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 48cbea3caac44c94801a7c9bfa3108f6 [ 952.165269] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 91c5f6e5371e4567b12b619598210787 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 952.201675] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 91c5f6e5371e4567b12b619598210787 [ 952.273707] env[62096]: DEBUG oslo_concurrency.lockutils [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Acquiring lock "9f3ce570-878f-48bf-a08c-0387b0556785" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.274034] env[62096]: DEBUG oslo_concurrency.lockutils [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Lock "9f3ce570-878f-48bf-a08c-0387b0556785" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62096) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.274265] env[62096]: DEBUG oslo_concurrency.lockutils [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Acquiring lock "9f3ce570-878f-48bf-a08c-0387b0556785-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.274450] env[62096]: DEBUG oslo_concurrency.lockutils [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Lock "9f3ce570-878f-48bf-a08c-0387b0556785-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.274614] env[62096]: DEBUG oslo_concurrency.lockutils [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Lock "9f3ce570-878f-48bf-a08c-0387b0556785-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 952.276830] env[62096]: INFO nova.compute.manager [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Terminating instance [ 952.278947] env[62096]: DEBUG oslo_concurrency.lockutils [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Acquiring lock "refresh_cache-9f3ce570-878f-48bf-a08c-0387b0556785" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 952.279165] env[62096]: DEBUG oslo_concurrency.lockutils [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Acquired lock "refresh_cache-9f3ce570-878f-48bf-a08c-0387b0556785" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 952.279442] env[62096]: DEBUG nova.network.neutron [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 952.280034] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg fc69344b1dc24ef4b6d8cd210965528a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 952.286963] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fc69344b1dc24ef4b6d8cd210965528a [ 952.326108] env[62096]: ERROR nova.compute.manager [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for 
port b59a00a8-d38c-41d2-8e43-14ab1a189145, please check neutron logs for more information. [ 952.326108] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 952.326108] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 952.326108] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 952.326108] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 952.326108] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 952.326108] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 952.326108] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 952.326108] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 952.326108] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 952.326108] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 952.326108] env[62096]: ERROR nova.compute.manager raise self.value [ 952.326108] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 952.326108] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 952.326108] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 952.326108] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 952.326656] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 952.326656] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 952.326656] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b59a00a8-d38c-41d2-8e43-14ab1a189145, please check neutron logs for more information. 
[ 952.326656] env[62096]: ERROR nova.compute.manager [ 952.326656] env[62096]: Traceback (most recent call last): [ 952.326656] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 952.326656] env[62096]: listener.cb(fileno) [ 952.326656] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 952.326656] env[62096]: result = function(*args, **kwargs) [ 952.326656] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 952.326656] env[62096]: return func(*args, **kwargs) [ 952.326656] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 952.326656] env[62096]: raise e [ 952.326656] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 952.326656] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 952.326656] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 952.326656] env[62096]: created_port_ids = self._update_ports_for_instance( [ 952.326656] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 952.326656] env[62096]: with excutils.save_and_reraise_exception(): [ 952.326656] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 952.326656] env[62096]: self.force_reraise() [ 952.326656] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 952.326656] env[62096]: raise self.value [ 952.326656] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 952.326656] env[62096]: updated_port = self._update_port( [ 952.326656] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 952.326656] env[62096]: _ensure_no_port_binding_failure(port) [ 952.326656] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 952.326656] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 952.327432] env[62096]: nova.exception.PortBindingFailed: Binding failed for port b59a00a8-d38c-41d2-8e43-14ab1a189145, please check neutron logs for more information. 
[ 952.327432] env[62096]: Removing descriptor: 14 [ 952.335575] env[62096]: DEBUG nova.scheduler.client.report [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 952.338368] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 7ed520a90c3a4cd18d883d75e966320b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 952.352265] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ed520a90c3a4cd18d883d75e966320b [ 952.661900] env[62096]: DEBUG nova.network.neutron [req-814e1d29-289d-4b84-9a0a-7c53c8129526 req-fb8d596e-5661-49f2-ae15-f0efc7d08063 service nova] [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 952.668726] env[62096]: DEBUG nova.compute.manager [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 952.694714] env[62096]: DEBUG nova.virt.hardware [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 952.694950] env[62096]: DEBUG nova.virt.hardware [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 952.695123] env[62096]: DEBUG nova.virt.hardware [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 952.695281] env[62096]: DEBUG nova.virt.hardware [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 952.695406] env[62096]: DEBUG nova.virt.hardware [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 952.695552] env[62096]: DEBUG nova.virt.hardware [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 952.695784] env[62096]: DEBUG nova.virt.hardware [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 952.695945] env[62096]: DEBUG nova.virt.hardware [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 952.696119] env[62096]: DEBUG nova.virt.hardware [None 
req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 952.696281] env[62096]: DEBUG nova.virt.hardware [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 952.696445] env[62096]: DEBUG nova.virt.hardware [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 952.697311] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec1414b6-c5f3-43c2-b428-a2e0f32600b7 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.705819] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ad7c5bb-49a5-4bca-b4fb-09d90eab3540 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.721598] env[62096]: ERROR nova.compute.manager [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b59a00a8-d38c-41d2-8e43-14ab1a189145, please check neutron logs for more information. 
[ 952.721598] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] Traceback (most recent call last): [ 952.721598] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 952.721598] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] yield resources [ 952.721598] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 952.721598] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] self.driver.spawn(context, instance, image_meta, [ 952.721598] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 952.721598] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 952.721598] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 952.721598] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] vm_ref = self.build_virtual_machine(instance, [ 952.721598] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 952.722094] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] vif_infos = vmwarevif.get_vif_info(self._session, [ 952.722094] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 952.722094] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] for vif in network_info: [ 952.722094] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 952.722094] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] return self._sync_wrapper(fn, *args, **kwargs) [ 952.722094] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 952.722094] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] self.wait() [ 952.722094] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 952.722094] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] self[:] = self._gt.wait() [ 952.722094] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 952.722094] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] return self._exit_event.wait() [ 952.722094] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 952.722094] env[62096]: ERROR 
nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] current.throw(*self._exc) [ 952.722583] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 952.722583] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] result = function(*args, **kwargs) [ 952.722583] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 952.722583] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] return func(*args, **kwargs) [ 952.722583] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 952.722583] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] raise e [ 952.722583] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 952.722583] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] nwinfo = self.network_api.allocate_for_instance( [ 952.722583] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 952.722583] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] created_port_ids = self._update_ports_for_instance( [ 952.722583] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 952.722583] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] with excutils.save_and_reraise_exception(): [ 952.722583] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 952.722963] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] self.force_reraise() [ 952.722963] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 952.722963] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] raise self.value [ 952.722963] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 952.722963] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] updated_port = self._update_port( [ 952.722963] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 952.722963] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] _ensure_no_port_binding_failure(port) [ 952.722963] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
952.722963] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] raise exception.PortBindingFailed(port_id=port['id']) [ 952.722963] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] nova.exception.PortBindingFailed: Binding failed for port b59a00a8-d38c-41d2-8e43-14ab1a189145, please check neutron logs for more information. [ 952.722963] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] [ 952.722963] env[62096]: INFO nova.compute.manager [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] Terminating instance [ 952.723905] env[62096]: DEBUG oslo_concurrency.lockutils [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Acquiring lock "refresh_cache-9968505a-c4cf-4ed5-9bec-0853d1d2840c" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 952.756212] env[62096]: DEBUG nova.network.neutron [req-814e1d29-289d-4b84-9a0a-7c53c8129526 req-fb8d596e-5661-49f2-ae15-f0efc7d08063 service nova] [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 952.756737] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-814e1d29-289d-4b84-9a0a-7c53c8129526 req-fb8d596e-5661-49f2-ae15-f0efc7d08063 service nova] Expecting reply to msg 9afa0caf52224560b11c06d2ea144640 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 952.765595] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9afa0caf52224560b11c06d2ea144640 [ 952.800453] env[62096]: DEBUG nova.network.neutron [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 952.841955] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.194s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 952.842426] env[62096]: DEBUG nova.compute.manager [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: db3c7e13-12d9-466d-9823-a0a107949588] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 952.845033] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg dda2f6a141bb4c67af35a636a119f87d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 952.846273] env[62096]: DEBUG oslo_concurrency.lockutils [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.434s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.856158] env[62096]: INFO nova.compute.claims [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 952.856158] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 8b558b7433ae462fa3891ff02ffb3e0a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 952.856158] env[62096]: DEBUG nova.network.neutron [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 952.856158] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg 72f745efb5864af884fa653e5a6ac9be in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 952.866558] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 72f745efb5864af884fa653e5a6ac9be [ 952.892467] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dda2f6a141bb4c67af35a636a119f87d [ 952.895320] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8b558b7433ae462fa3891ff02ffb3e0a [ 953.259376] env[62096]: DEBUG oslo_concurrency.lockutils [req-814e1d29-289d-4b84-9a0a-7c53c8129526 req-fb8d596e-5661-49f2-ae15-f0efc7d08063 service nova] Releasing lock "refresh_cache-9968505a-c4cf-4ed5-9bec-0853d1d2840c" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 953.259792] env[62096]: DEBUG oslo_concurrency.lockutils [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Acquired lock "refresh_cache-9968505a-c4cf-4ed5-9bec-0853d1d2840c" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 953.259979] env[62096]: DEBUG nova.network.neutron [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] Building network info cache for instance {{(pid=62096) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 953.260449] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg f02cb1ed4c394f7e8996e1e517c9f136 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 953.268098] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f02cb1ed4c394f7e8996e1e517c9f136 [ 953.353189] env[62096]: DEBUG nova.compute.utils [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 953.353694] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 788fc45843c3427d9ef6e10fe02eb26c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 953.359301] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg d34df8ccf3704a38b5c444c39e129c63 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 953.359301] env[62096]: DEBUG nova.compute.manager [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: db3c7e13-12d9-466d-9823-a0a107949588] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 953.359301] env[62096]: DEBUG nova.network.neutron [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: db3c7e13-12d9-466d-9823-a0a107949588] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 953.359301] env[62096]: DEBUG oslo_concurrency.lockutils [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Releasing lock "refresh_cache-9f3ce570-878f-48bf-a08c-0387b0556785" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 953.359301] env[62096]: DEBUG nova.compute.manager [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 953.359692] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 953.361075] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ce2a167-3c82-4fc4-ba52-003a8d166ebd {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.365614] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 788fc45843c3427d9ef6e10fe02eb26c [ 953.369260] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Powering off the VM {{(pid=62096) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 953.369490] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-61bee4b7-f282-4e8d-aa51-446e52d6d012 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.373189] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d34df8ccf3704a38b5c444c39e129c63 [ 953.379739] env[62096]: DEBUG oslo_vmware.api [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Waiting for the task: (returnval){ [ 953.379739] env[62096]: value = "task-397487" [ 953.379739] env[62096]: _type = "Task" [ 953.379739] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.389219] env[62096]: DEBUG oslo_vmware.api [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397487, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.411601] env[62096]: DEBUG nova.policy [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '09a4673f033c4d139efe4cd9ba4b7560', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd81a5a37184d4b29ad6df7e77dfd3ee4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 953.713074] env[62096]: DEBUG nova.network.neutron [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: db3c7e13-12d9-466d-9823-a0a107949588] Successfully created port: 356e995c-488b-4cba-bff7-57d3e11bf041 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 953.786862] env[62096]: DEBUG nova.network.neutron [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 953.857546] env[62096]: DEBUG nova.compute.manager [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: db3c7e13-12d9-466d-9823-a0a107949588] Start building block device mappings for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 953.859264] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 52d4f5e3d2424cfbb1b139b6abd3b40b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 953.891361] env[62096]: DEBUG oslo_vmware.api [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397487, 'name': PowerOffVM_Task, 'duration_secs': 0.192986} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.891635] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Powered off the VM {{(pid=62096) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 953.891803] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Unregistering the VM {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 953.892258] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9d062f2f-98c5-4851-8c60-637147cfb9e4 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.902769] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 52d4f5e3d2424cfbb1b139b6abd3b40b [ 953.916841] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Unregistered the VM {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 953.917065] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Deleting contents of the VM from datastore datastore1 {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 953.917244] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Deleting the datastore file [datastore1] 9f3ce570-878f-48bf-a08c-0387b0556785 {{(pid=62096) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 953.918373] env[62096]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0e425a6d-85f0-4732-8dc3-5328330e8adb {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.924115] env[62096]: DEBUG oslo_vmware.api [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Waiting for the task: (returnval){ [ 953.924115] env[62096]: value = "task-397489" [ 953.924115] env[62096]: _type = "Task" [ 953.924115] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.933072] env[62096]: DEBUG oslo_vmware.api [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397489, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.968071] env[62096]: DEBUG nova.network.neutron [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 953.968525] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg b80e6c2efd7c43edae203e70238dff3a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 953.974076] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27c514a3-bbcc-4854-95c8-ede61c3b6f97 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.976897] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b80e6c2efd7c43edae203e70238dff3a [ 953.981962] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2bc0829-9688-4827-986f-65c235e17c4b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.027339] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c91db71a-933e-47ac-bfe6-ad4d7a43be78 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.036136] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90a5f206-f0f0-44b2-a4f0-d9b4a8ce5f4e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.055201] env[62096]: DEBUG nova.compute.provider_tree [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 954.055787] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 9fd84e84ee154225b6ff099728b391eb in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 954.064660] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9fd84e84ee154225b6ff099728b391eb [ 954.229140] env[62096]: DEBUG nova.compute.manager [req-4d7922f2-275c-4952-90b7-c3dacb2d39b8 req-193d60fc-b5ee-42e9-9391-dac73f144998 service nova] [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] Received event network-vif-deleted-b59a00a8-d38c-41d2-8e43-14ab1a189145 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 954.368299] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 51c92807837e4d809f28d948d2f18072 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 954.417271] env[62096]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 51c92807837e4d809f28d948d2f18072 [ 954.434194] env[62096]: DEBUG oslo_vmware.api [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397489, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.104327} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.434494] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Deleted the datastore file {{(pid=62096) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 954.434712] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Deleted contents of the VM from datastore datastore1 {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 954.434943] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 954.435187] env[62096]: INFO nova.compute.manager [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Took 1.08 seconds to destroy the instance on the hypervisor. [ 954.435463] env[62096]: DEBUG oslo.service.loopingcall [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 954.435685] env[62096]: DEBUG nova.compute.manager [-] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 954.435810] env[62096]: DEBUG nova.network.neutron [-] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 954.452264] env[62096]: DEBUG nova.network.neutron [-] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 954.453038] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 3c83165d1a5246cdb2d49090d73a5071 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 954.461533] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c83165d1a5246cdb2d49090d73a5071 [ 954.470367] env[62096]: DEBUG oslo_concurrency.lockutils [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Releasing lock "refresh_cache-9968505a-c4cf-4ed5-9bec-0853d1d2840c" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 954.470839] env[62096]: DEBUG nova.compute.manager [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 954.471027] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 954.471361] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4dcb3b34-0143-4a04-9a6f-5e544f44fa65 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.481075] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99b2f764-029a-4e72-b5fb-beb83322de42 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.513501] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9968505a-c4cf-4ed5-9bec-0853d1d2840c could not be found. [ 954.516353] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 954.516623] env[62096]: INFO nova.compute.manager [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] Took 0.05 seconds to destroy the instance on the hypervisor. [ 954.516909] env[62096]: DEBUG oslo.service.loopingcall [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 954.517156] env[62096]: DEBUG nova.compute.manager [-] [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 954.517258] env[62096]: DEBUG nova.network.neutron [-] [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 954.537650] env[62096]: DEBUG nova.network.neutron [-] [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 954.538464] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 51765b88d2794d4fbd9d53624c70d0f6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 954.547085] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 51765b88d2794d4fbd9d53624c70d0f6 [ 954.559218] env[62096]: DEBUG nova.scheduler.client.report [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 954.561652] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg b23864e71a5b476c9cda81b38867bbb4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 954.578090] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b23864e71a5b476c9cda81b38867bbb4 [ 954.597085] env[62096]: ERROR nova.compute.manager [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 356e995c-488b-4cba-bff7-57d3e11bf041, please check neutron logs for more information. 
[ 954.597085] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 954.597085] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 954.597085] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 954.597085] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 954.597085] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 954.597085] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 954.597085] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 954.597085] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 954.597085] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 954.597085] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 954.597085] env[62096]: ERROR nova.compute.manager raise self.value [ 954.597085] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 954.597085] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 954.597085] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 954.597085] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 954.597588] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 954.597588] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 954.597588] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 356e995c-488b-4cba-bff7-57d3e11bf041, please check neutron logs for more information. 
[ 954.597588] env[62096]: ERROR nova.compute.manager [ 954.597588] env[62096]: Traceback (most recent call last): [ 954.597588] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 954.597588] env[62096]: listener.cb(fileno) [ 954.597588] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 954.597588] env[62096]: result = function(*args, **kwargs) [ 954.597588] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 954.597588] env[62096]: return func(*args, **kwargs) [ 954.597588] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 954.597588] env[62096]: raise e [ 954.597588] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 954.597588] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 954.597588] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 954.597588] env[62096]: created_port_ids = self._update_ports_for_instance( [ 954.597588] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 954.597588] env[62096]: with excutils.save_and_reraise_exception(): [ 954.597588] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 954.597588] env[62096]: self.force_reraise() [ 954.597588] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 954.597588] env[62096]: raise self.value [ 954.597588] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 954.597588] env[62096]: updated_port = self._update_port( [ 954.597588] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 954.597588] env[62096]: _ensure_no_port_binding_failure(port) [ 954.597588] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 954.597588] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 954.598470] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 356e995c-488b-4cba-bff7-57d3e11bf041, please check neutron logs for more information. [ 954.598470] env[62096]: Removing descriptor: 14 [ 954.871608] env[62096]: DEBUG nova.compute.manager [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: db3c7e13-12d9-466d-9823-a0a107949588] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 954.892810] env[62096]: DEBUG nova.virt.hardware [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 954.893052] env[62096]: DEBUG nova.virt.hardware [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 954.893385] env[62096]: DEBUG nova.virt.hardware [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 954.893385] env[62096]: DEBUG nova.virt.hardware [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 954.893510] env[62096]: DEBUG nova.virt.hardware [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 954.893647] env[62096]: DEBUG nova.virt.hardware [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 954.893841] env[62096]: DEBUG nova.virt.hardware [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 954.894030] env[62096]: DEBUG nova.virt.hardware [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 954.894193] env[62096]: DEBUG nova.virt.hardware [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] 
Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 954.894354] env[62096]: DEBUG nova.virt.hardware [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 954.894522] env[62096]: DEBUG nova.virt.hardware [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 954.895412] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88ed54d3-7cc2-47f6-a8f3-95c4417f056c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.904895] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-970c70d6-6820-43cc-b81a-c047c32f8fd8 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.920114] env[62096]: ERROR nova.compute.manager [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: db3c7e13-12d9-466d-9823-a0a107949588] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 356e995c-488b-4cba-bff7-57d3e11bf041, please check neutron logs for more information. [ 954.920114] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] Traceback (most recent call last): [ 954.920114] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 954.920114] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] yield resources [ 954.920114] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 954.920114] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] self.driver.spawn(context, instance, image_meta, [ 954.920114] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 954.920114] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] self._vmops.spawn(context, instance, image_meta, injected_files, [ 954.920114] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 954.920114] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] vm_ref = self.build_virtual_machine(instance, [ 954.920114] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 954.920631] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] vif_infos = vmwarevif.get_vif_info(self._session, [ 954.920631] env[62096]: ERROR 
nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 954.920631] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] for vif in network_info: [ 954.920631] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 954.920631] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] return self._sync_wrapper(fn, *args, **kwargs) [ 954.920631] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 954.920631] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] self.wait() [ 954.920631] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 954.920631] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] self[:] = self._gt.wait() [ 954.920631] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 954.920631] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] return self._exit_event.wait() [ 954.920631] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 954.920631] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] current.throw(*self._exc) [ 954.921048] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 954.921048] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] result = function(*args, **kwargs) [ 954.921048] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 954.921048] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] return func(*args, **kwargs) [ 954.921048] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 954.921048] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] raise e [ 954.921048] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 954.921048] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] nwinfo = self.network_api.allocate_for_instance( [ 954.921048] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 954.921048] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] created_port_ids = self._update_ports_for_instance( [ 954.921048] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] 
File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 954.921048] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] with excutils.save_and_reraise_exception(): [ 954.921048] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 954.921618] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] self.force_reraise() [ 954.921618] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 954.921618] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] raise self.value [ 954.921618] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 954.921618] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] updated_port = self._update_port( [ 954.921618] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 954.921618] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] _ensure_no_port_binding_failure(port) [ 954.921618] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 954.921618] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] raise exception.PortBindingFailed(port_id=port['id']) [ 954.921618] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] nova.exception.PortBindingFailed: Binding failed for port 356e995c-488b-4cba-bff7-57d3e11bf041, please check neutron logs for more information. 
[ 954.921618] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] [ 954.921618] env[62096]: INFO nova.compute.manager [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: db3c7e13-12d9-466d-9823-a0a107949588] Terminating instance [ 954.922737] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Acquiring lock "refresh_cache-db3c7e13-12d9-466d-9823-a0a107949588" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 954.922890] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Acquired lock "refresh_cache-db3c7e13-12d9-466d-9823-a0a107949588" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.923046] env[62096]: DEBUG nova.network.neutron [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: db3c7e13-12d9-466d-9823-a0a107949588] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 954.923486] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg eccd4c05363a4939bd8887dcf82ac271 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 954.930617] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eccd4c05363a4939bd8887dcf82ac271 [ 954.954902] env[62096]: DEBUG nova.network.neutron [-] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.955639] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 417439e1047e48b7b0e9cbfba4ad2abb in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 954.963232] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 417439e1047e48b7b0e9cbfba4ad2abb [ 955.040702] env[62096]: DEBUG nova.network.neutron [-] [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.041276] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 8cfe0869c346431c963015224669ec36 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 955.051126] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8cfe0869c346431c963015224669ec36 [ 955.064446] env[62096]: DEBUG oslo_concurrency.lockutils [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.218s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.065080] env[62096]: DEBUG nova.compute.manager [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f 
tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] Start building networks asynchronously for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 955.066880] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 0f89aab12f374e948631493b307e5888 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 955.067862] env[62096]: DEBUG oslo_concurrency.lockutils [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 3.308s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.068092] env[62096]: DEBUG nova.objects.instance [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62096) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 955.069512] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg a725738ff4dc4191a759a7746f945cb6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 955.101986] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0f89aab12f374e948631493b307e5888 [ 955.102706] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a725738ff4dc4191a759a7746f945cb6 [ 955.417588] env[62096]: DEBUG oslo_concurrency.lockutils [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Acquiring lock "e6ce8ba5-170a-4221-9e41-15580876f28f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.418114] env[62096]: DEBUG oslo_concurrency.lockutils [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Lock "e6ce8ba5-170a-4221-9e41-15580876f28f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.418728] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Expecting reply to msg 6563f96d1e1541bb9ec3333fb2180997 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 955.430815] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6563f96d1e1541bb9ec3333fb2180997 [ 955.441573] env[62096]: DEBUG nova.network.neutron [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 
tempest-ServersTestJSON-796182065-project-member] [instance: db3c7e13-12d9-466d-9823-a0a107949588] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 955.463539] env[62096]: INFO nova.compute.manager [-] [instance: 9f3ce570-878f-48bf-a08c-0387b0556785] Took 1.03 seconds to deallocate network for instance. [ 955.468090] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg 4b1f4536a2a84c77a8572cb629e1321b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 955.505616] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4b1f4536a2a84c77a8572cb629e1321b [ 955.515439] env[62096]: DEBUG nova.network.neutron [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: db3c7e13-12d9-466d-9823-a0a107949588] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.515439] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 61182185c31242bdbace65f8c01ce9b9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 955.523459] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 61182185c31242bdbace65f8c01ce9b9 [ 955.543168] env[62096]: INFO nova.compute.manager [-] [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] Took 1.03 seconds to deallocate network for instance. 
[ 955.545443] env[62096]: DEBUG nova.compute.claims [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 955.545773] env[62096]: DEBUG oslo_concurrency.lockutils [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.572881] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg 5497a71586c8468d88388c82b698d6e2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 955.574659] env[62096]: DEBUG nova.compute.utils [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 955.575411] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg e0d53575dd3d49d98cc36ad68f9f5c6d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 955.576471] env[62096]: DEBUG nova.compute.manager [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 955.580229] env[62096]: DEBUG nova.network.neutron [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 955.584866] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5497a71586c8468d88388c82b698d6e2 [ 955.585427] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e0d53575dd3d49d98cc36ad68f9f5c6d [ 955.616859] env[62096]: DEBUG nova.policy [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6d00589b1aa24dd7beb7c3ac5cb2a8ca', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bffafc9dc83d477d823cd7364968f48a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 955.865823] env[62096]: DEBUG nova.network.neutron [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] Successfully created port: b8cb63f1-3918-4d7e-9445-e474542f97cc {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 955.920500] env[62096]: DEBUG nova.compute.manager [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] Starting instance... 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 955.922385] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Expecting reply to msg c5171c86b8ae4b429027a7e5e3710154 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 955.955126] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c5171c86b8ae4b429027a7e5e3710154 [ 955.972472] env[62096]: DEBUG oslo_concurrency.lockutils [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 956.016690] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Releasing lock "refresh_cache-db3c7e13-12d9-466d-9823-a0a107949588" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 956.017105] env[62096]: DEBUG nova.compute.manager [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: db3c7e13-12d9-466d-9823-a0a107949588] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 956.017289] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: db3c7e13-12d9-466d-9823-a0a107949588] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 956.017590] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2f3e4342-a4b1-47e4-be8f-7fb468c9c7be {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.030047] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eb96ef1-5634-4ceb-a25e-d03d43911b40 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.056616] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: db3c7e13-12d9-466d-9823-a0a107949588] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance db3c7e13-12d9-466d-9823-a0a107949588 could not be found. [ 956.056836] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: db3c7e13-12d9-466d-9823-a0a107949588] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 956.057013] env[62096]: INFO nova.compute.manager [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: db3c7e13-12d9-466d-9823-a0a107949588] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 956.057268] env[62096]: DEBUG oslo.service.loopingcall [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 956.057475] env[62096]: DEBUG nova.compute.manager [-] [instance: db3c7e13-12d9-466d-9823-a0a107949588] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 956.057569] env[62096]: DEBUG nova.network.neutron [-] [instance: db3c7e13-12d9-466d-9823-a0a107949588] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 956.070829] env[62096]: DEBUG nova.network.neutron [-] [instance: db3c7e13-12d9-466d-9823-a0a107949588] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 956.071277] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg d708e110567a4470bf0a7713d39e35db in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 956.077401] env[62096]: DEBUG oslo_concurrency.lockutils [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 956.077735] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2c829ebc-c734-45e1-9a0f-cb5ed22a321f tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg 51950ad6dbc74a44b086b5976cfb882b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 956.083551] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d708e110567a4470bf0a7713d39e35db [ 956.083957] env[62096]: DEBUG oslo_concurrency.lockutils [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.538s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 956.085758] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 2ac47d8277e74c838fa6989519e83a5f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 956.086781] env[62096]: DEBUG nova.compute.manager [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 956.088332] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 62af288a5b334ecb926a071c9478052e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 956.091290] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 51950ad6dbc74a44b086b5976cfb882b [ 956.130774] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 62af288a5b334ecb926a071c9478052e [ 956.132128] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2ac47d8277e74c838fa6989519e83a5f [ 956.253948] env[62096]: DEBUG nova.compute.manager [req-c932e473-0985-4458-b347-fee2b1daebd0 req-ebfef654-5e42-443f-9b03-c8d919188c84 service nova] [instance: db3c7e13-12d9-466d-9823-a0a107949588] Received event network-changed-356e995c-488b-4cba-bff7-57d3e11bf041 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 956.254222] env[62096]: DEBUG nova.compute.manager [req-c932e473-0985-4458-b347-fee2b1daebd0 req-ebfef654-5e42-443f-9b03-c8d919188c84 service nova] [instance: db3c7e13-12d9-466d-9823-a0a107949588] Refreshing instance network info cache due to event network-changed-356e995c-488b-4cba-bff7-57d3e11bf041. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 956.254493] env[62096]: DEBUG oslo_concurrency.lockutils [req-c932e473-0985-4458-b347-fee2b1daebd0 req-ebfef654-5e42-443f-9b03-c8d919188c84 service nova] Acquiring lock "refresh_cache-db3c7e13-12d9-466d-9823-a0a107949588" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 956.254642] env[62096]: DEBUG oslo_concurrency.lockutils [req-c932e473-0985-4458-b347-fee2b1daebd0 req-ebfef654-5e42-443f-9b03-c8d919188c84 service nova] Acquired lock "refresh_cache-db3c7e13-12d9-466d-9823-a0a107949588" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.254801] env[62096]: DEBUG nova.network.neutron [req-c932e473-0985-4458-b347-fee2b1daebd0 req-ebfef654-5e42-443f-9b03-c8d919188c84 service nova] [instance: db3c7e13-12d9-466d-9823-a0a107949588] Refreshing network info cache for port 356e995c-488b-4cba-bff7-57d3e11bf041 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 956.255293] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-c932e473-0985-4458-b347-fee2b1daebd0 req-ebfef654-5e42-443f-9b03-c8d919188c84 service nova] Expecting reply to msg 193224a50a7843a484c64195454f7011 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 956.262924] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 193224a50a7843a484c64195454f7011 [ 956.450648] env[62096]: DEBUG oslo_concurrency.lockutils [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 956.573543] env[62096]: DEBUG nova.network.neutron [-] [instance: db3c7e13-12d9-466d-9823-a0a107949588] Updating instance_info_cache with network_info: [] {{(pid=62096) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 956.574182] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e04dc4a1d30b4c798ca8ffe78d2333a0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 956.583289] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e04dc4a1d30b4c798ca8ffe78d2333a0 [ 956.598144] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 8ff13bf70b8f4b2da54660213bcbf19c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 956.625836] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8ff13bf70b8f4b2da54660213bcbf19c [ 956.688504] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4998823d-439d-41e6-b271-8604eca5a73b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.695962] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50085100-16ee-485d-92f7-d2b1d491b7de {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.726653] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-809cfc20-5f85-4609-aa5f-d0ad5c2a6b2d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.733619] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba5816e8-6ba6-4328-9934-6c74f88fb1a9 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.746648] env[62096]: DEBUG nova.compute.provider_tree [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 956.747241] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 06e74279993941c3ab146a4e238b2847 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 956.754453] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 06e74279993941c3ab146a4e238b2847 [ 956.772382] env[62096]: DEBUG nova.network.neutron [req-c932e473-0985-4458-b347-fee2b1daebd0 req-ebfef654-5e42-443f-9b03-c8d919188c84 service nova] [instance: db3c7e13-12d9-466d-9823-a0a107949588] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 956.777229] env[62096]: ERROR nova.compute.manager [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b8cb63f1-3918-4d7e-9445-e474542f97cc, please check neutron logs for more information. 
[ 956.777229] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 956.777229] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 956.777229] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 956.777229] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 956.777229] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 956.777229] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 956.777229] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 956.777229] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 956.777229] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 956.777229] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 956.777229] env[62096]: ERROR nova.compute.manager raise self.value [ 956.777229] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 956.777229] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 956.777229] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 956.777229] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 956.777798] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 956.777798] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 956.777798] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b8cb63f1-3918-4d7e-9445-e474542f97cc, please check neutron logs for more information. 
[ 956.777798] env[62096]: ERROR nova.compute.manager [ 956.777798] env[62096]: Traceback (most recent call last): [ 956.777798] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 956.777798] env[62096]: listener.cb(fileno) [ 956.777798] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 956.777798] env[62096]: result = function(*args, **kwargs) [ 956.777798] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 956.777798] env[62096]: return func(*args, **kwargs) [ 956.777798] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 956.777798] env[62096]: raise e [ 956.777798] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 956.777798] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 956.777798] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 956.777798] env[62096]: created_port_ids = self._update_ports_for_instance( [ 956.777798] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 956.777798] env[62096]: with excutils.save_and_reraise_exception(): [ 956.777798] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 956.777798] env[62096]: self.force_reraise() [ 956.777798] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 956.777798] env[62096]: raise self.value [ 956.777798] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 956.777798] env[62096]: updated_port = self._update_port( [ 956.777798] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 956.777798] env[62096]: _ensure_no_port_binding_failure(port) [ 956.777798] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 956.777798] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 956.778798] env[62096]: nova.exception.PortBindingFailed: Binding failed for port b8cb63f1-3918-4d7e-9445-e474542f97cc, please check neutron logs for more information. [ 956.778798] env[62096]: Removing descriptor: 14 [ 956.843116] env[62096]: DEBUG nova.network.neutron [req-c932e473-0985-4458-b347-fee2b1daebd0 req-ebfef654-5e42-443f-9b03-c8d919188c84 service nova] [instance: db3c7e13-12d9-466d-9823-a0a107949588] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 956.843622] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-c932e473-0985-4458-b347-fee2b1daebd0 req-ebfef654-5e42-443f-9b03-c8d919188c84 service nova] Expecting reply to msg c55ce88d80204cbab9693ad9c00a1221 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 956.851837] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c55ce88d80204cbab9693ad9c00a1221 [ 957.076811] env[62096]: INFO nova.compute.manager [-] [instance: db3c7e13-12d9-466d-9823-a0a107949588] Took 1.02 seconds to deallocate network for instance. 
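The two tracebacks above walk the same path (allocate_for_instance -> _update_ports_for_instance -> _update_port -> _ensure_no_port_binding_failure at nova/network/neutron.py:294) before raising PortBindingFailed for port b8cb63f1-3918-4d7e-9445-e474542f97cc. A minimal sketch of what that final check amounts to is below; it is illustrative only, and the 'binding_failed' vif_type comparison is an assumption about how Neutron reports a failed binding, not a quote of the Nova source.

    # Hedged sketch: approximates the check implied by the traceback at
    # nova/network/neutron.py:294. The 'binding_failed' comparison is an
    # assumption; consult the Nova source for the authoritative logic.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port: dict) -> None:
        # Neutron signals a failed binding on the port it hands back to Nova;
        # here that is modelled as binding:vif_type == 'binding_failed'.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    # Example with the port id seen in this log:
    try:
        ensure_no_port_binding_failure(
            {'id': 'b8cb63f1-3918-4d7e-9445-e474542f97cc',
             'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)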
[ 957.079219] env[62096]: DEBUG nova.compute.claims [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: db3c7e13-12d9-466d-9823-a0a107949588] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 957.079423] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 957.098507] env[62096]: DEBUG nova.compute.manager [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] Start spawning the instance on the hypervisor. {{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 957.123554] env[62096]: DEBUG nova.virt.hardware [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 957.123789] env[62096]: DEBUG nova.virt.hardware [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 957.123940] env[62096]: DEBUG nova.virt.hardware [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 957.124175] env[62096]: DEBUG nova.virt.hardware [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 957.124329] env[62096]: DEBUG nova.virt.hardware [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 957.124492] env[62096]: DEBUG nova.virt.hardware [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 
tempest-ServerDiskConfigTestJSON-884909396-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 957.124679] env[62096]: DEBUG nova.virt.hardware [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 957.124836] env[62096]: DEBUG nova.virt.hardware [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 957.125055] env[62096]: DEBUG nova.virt.hardware [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 957.125234] env[62096]: DEBUG nova.virt.hardware [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 957.125406] env[62096]: DEBUG nova.virt.hardware [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 957.126243] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e6772ec-6e56-4f5e-9282-e09694dfb593 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.134136] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c4b12f9-3b19-40f7-8dad-944b866caab6 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.147236] env[62096]: ERROR nova.compute.manager [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b8cb63f1-3918-4d7e-9445-e474542f97cc, please check neutron logs for more information. 
[ 957.147236] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] Traceback (most recent call last): [ 957.147236] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 957.147236] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] yield resources [ 957.147236] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 957.147236] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] self.driver.spawn(context, instance, image_meta, [ 957.147236] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 957.147236] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] self._vmops.spawn(context, instance, image_meta, injected_files, [ 957.147236] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 957.147236] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] vm_ref = self.build_virtual_machine(instance, [ 957.147236] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 957.147653] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] vif_infos = vmwarevif.get_vif_info(self._session, [ 957.147653] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 957.147653] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] for vif in network_info: [ 957.147653] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 957.147653] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] return self._sync_wrapper(fn, *args, **kwargs) [ 957.147653] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 957.147653] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] self.wait() [ 957.147653] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 957.147653] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] self[:] = self._gt.wait() [ 957.147653] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 957.147653] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] return self._exit_event.wait() [ 957.147653] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 957.147653] env[62096]: ERROR 
nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] current.throw(*self._exc) [ 957.148102] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 957.148102] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] result = function(*args, **kwargs) [ 957.148102] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 957.148102] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] return func(*args, **kwargs) [ 957.148102] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 957.148102] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] raise e [ 957.148102] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 957.148102] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] nwinfo = self.network_api.allocate_for_instance( [ 957.148102] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 957.148102] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] created_port_ids = self._update_ports_for_instance( [ 957.148102] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 957.148102] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] with excutils.save_and_reraise_exception(): [ 957.148102] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 957.148543] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] self.force_reraise() [ 957.148543] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 957.148543] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] raise self.value [ 957.148543] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 957.148543] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] updated_port = self._update_port( [ 957.148543] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 957.148543] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] _ensure_no_port_binding_failure(port) [ 957.148543] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
957.148543] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] raise exception.PortBindingFailed(port_id=port['id']) [ 957.148543] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] nova.exception.PortBindingFailed: Binding failed for port b8cb63f1-3918-4d7e-9445-e474542f97cc, please check neutron logs for more information. [ 957.148543] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] [ 957.148543] env[62096]: INFO nova.compute.manager [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] Terminating instance [ 957.149526] env[62096]: DEBUG oslo_concurrency.lockutils [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Acquiring lock "refresh_cache-8a9e2396-2585-4fb8-9f92-76d4d1578a95" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 957.149682] env[62096]: DEBUG oslo_concurrency.lockutils [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Acquired lock "refresh_cache-8a9e2396-2585-4fb8-9f92-76d4d1578a95" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.149843] env[62096]: DEBUG nova.network.neutron [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 957.150770] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg cfd19412871c4feba3f19e26330a92bb in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 957.158572] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cfd19412871c4feba3f19e26330a92bb [ 957.250385] env[62096]: DEBUG nova.scheduler.client.report [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 957.252835] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 4e611b0112eb41da9b3b5608c4a74fc9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 957.267531] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
4e611b0112eb41da9b3b5608c4a74fc9 [ 957.346335] env[62096]: DEBUG oslo_concurrency.lockutils [req-c932e473-0985-4458-b347-fee2b1daebd0 req-ebfef654-5e42-443f-9b03-c8d919188c84 service nova] Releasing lock "refresh_cache-db3c7e13-12d9-466d-9823-a0a107949588" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 957.346592] env[62096]: DEBUG nova.compute.manager [req-c932e473-0985-4458-b347-fee2b1daebd0 req-ebfef654-5e42-443f-9b03-c8d919188c84 service nova] [instance: db3c7e13-12d9-466d-9823-a0a107949588] Received event network-vif-deleted-356e995c-488b-4cba-bff7-57d3e11bf041 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 957.666756] env[62096]: DEBUG nova.network.neutron [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 957.742159] env[62096]: DEBUG nova.network.neutron [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 957.742683] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 8ea40276a3204d0c87853716ae69a88c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 957.750931] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8ea40276a3204d0c87853716ae69a88c [ 957.755126] env[62096]: DEBUG oslo_concurrency.lockutils [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.671s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 957.755716] env[62096]: ERROR nova.compute.manager [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b59a00a8-d38c-41d2-8e43-14ab1a189145, please check neutron logs for more information. 
[ 957.755716] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] Traceback (most recent call last): [ 957.755716] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 957.755716] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] self.driver.spawn(context, instance, image_meta, [ 957.755716] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 957.755716] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 957.755716] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 957.755716] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] vm_ref = self.build_virtual_machine(instance, [ 957.755716] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 957.755716] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] vif_infos = vmwarevif.get_vif_info(self._session, [ 957.755716] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 957.756133] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] for vif in network_info: [ 957.756133] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 957.756133] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] return self._sync_wrapper(fn, *args, **kwargs) [ 957.756133] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 957.756133] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] self.wait() [ 957.756133] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 957.756133] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] self[:] = self._gt.wait() [ 957.756133] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 957.756133] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] return self._exit_event.wait() [ 957.756133] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 957.756133] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] current.throw(*self._exc) [ 957.756133] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
957.756133] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] result = function(*args, **kwargs) [ 957.756555] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 957.756555] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] return func(*args, **kwargs) [ 957.756555] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 957.756555] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] raise e [ 957.756555] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 957.756555] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] nwinfo = self.network_api.allocate_for_instance( [ 957.756555] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 957.756555] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] created_port_ids = self._update_ports_for_instance( [ 957.756555] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 957.756555] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] with excutils.save_and_reraise_exception(): [ 957.756555] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 957.756555] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] self.force_reraise() [ 957.756555] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 957.756963] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] raise self.value [ 957.756963] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 957.756963] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] updated_port = self._update_port( [ 957.756963] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 957.756963] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] _ensure_no_port_binding_failure(port) [ 957.756963] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 957.756963] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] raise exception.PortBindingFailed(port_id=port['id']) [ 957.756963] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] nova.exception.PortBindingFailed: Binding failed for 
port b59a00a8-d38c-41d2-8e43-14ab1a189145, please check neutron logs for more information. [ 957.756963] env[62096]: ERROR nova.compute.manager [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] [ 957.756963] env[62096]: DEBUG nova.compute.utils [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] Binding failed for port b59a00a8-d38c-41d2-8e43-14ab1a189145, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 957.757790] env[62096]: DEBUG nova.compute.manager [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] Build of instance 9968505a-c4cf-4ed5-9bec-0853d1d2840c was re-scheduled: Binding failed for port b59a00a8-d38c-41d2-8e43-14ab1a189145, please check neutron logs for more information. {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 957.758215] env[62096]: DEBUG nova.compute.manager [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 957.758441] env[62096]: DEBUG oslo_concurrency.lockutils [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Acquiring lock "refresh_cache-9968505a-c4cf-4ed5-9bec-0853d1d2840c" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 957.758594] env[62096]: DEBUG oslo_concurrency.lockutils [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Acquired lock "refresh_cache-9968505a-c4cf-4ed5-9bec-0853d1d2840c" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.758757] env[62096]: DEBUG nova.network.neutron [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 957.759163] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 1d6fe05a21c84b00aa3fcea873459447 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 957.760470] env[62096]: DEBUG oslo_concurrency.lockutils [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.787s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 957.760705] env[62096]: DEBUG nova.objects.instance [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] 
Lazy-loading 'resources' on Instance uuid 9f3ce570-878f-48bf-a08c-0387b0556785 {{(pid=62096) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 957.761011] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg f4c4e3e3751043cabd5812d23730252e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 957.766543] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f4c4e3e3751043cabd5812d23730252e [ 957.767203] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1d6fe05a21c84b00aa3fcea873459447 [ 957.891094] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 957.891330] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 957.891478] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Starting heal instance info cache {{(pid=62096) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9926}} [ 957.891606] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Rebuilding the list of instances to heal {{(pid=62096) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 957.892242] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 2a3f9b60be6b4c149b6e8addcedaef10 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 957.905453] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2a3f9b60be6b4c149b6e8addcedaef10 [ 958.245224] env[62096]: DEBUG oslo_concurrency.lockutils [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Releasing lock "refresh_cache-8a9e2396-2585-4fb8-9f92-76d4d1578a95" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 958.245580] env[62096]: DEBUG nova.compute.manager [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 958.245776] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 958.246084] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cfaffc95-b148-4469-87a1-cad3e5ca7ade {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.255392] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91907341-b009-4515-b980-63472a6ac995 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.285594] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8a9e2396-2585-4fb8-9f92-76d4d1578a95 could not be found. [ 958.285815] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 958.285999] env[62096]: INFO nova.compute.manager [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] Took 0.04 seconds to destroy the instance on the hypervisor. [ 958.286274] env[62096]: DEBUG oslo.service.loopingcall [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 958.287417] env[62096]: DEBUG nova.network.neutron [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 958.292329] env[62096]: DEBUG nova.compute.manager [-] [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 958.292428] env[62096]: DEBUG nova.network.neutron [-] [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 958.294561] env[62096]: DEBUG nova.compute.manager [req-d54273df-250a-4d25-a322-587feff26b59 req-136d7d76-dd8a-4966-a82b-e266623896fb service nova] [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] Received event network-changed-b8cb63f1-3918-4d7e-9445-e474542f97cc {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 958.295201] env[62096]: DEBUG nova.compute.manager [req-d54273df-250a-4d25-a322-587feff26b59 req-136d7d76-dd8a-4966-a82b-e266623896fb service nova] [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] Refreshing instance network info cache due to event network-changed-b8cb63f1-3918-4d7e-9445-e474542f97cc. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 958.295201] env[62096]: DEBUG oslo_concurrency.lockutils [req-d54273df-250a-4d25-a322-587feff26b59 req-136d7d76-dd8a-4966-a82b-e266623896fb service nova] Acquiring lock "refresh_cache-8a9e2396-2585-4fb8-9f92-76d4d1578a95" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 958.295201] env[62096]: DEBUG oslo_concurrency.lockutils [req-d54273df-250a-4d25-a322-587feff26b59 req-136d7d76-dd8a-4966-a82b-e266623896fb service nova] Acquired lock "refresh_cache-8a9e2396-2585-4fb8-9f92-76d4d1578a95" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.295341] env[62096]: DEBUG nova.network.neutron [req-d54273df-250a-4d25-a322-587feff26b59 req-136d7d76-dd8a-4966-a82b-e266623896fb service nova] [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] Refreshing network info cache for port b8cb63f1-3918-4d7e-9445-e474542f97cc {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 958.296027] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-d54273df-250a-4d25-a322-587feff26b59 req-136d7d76-dd8a-4966-a82b-e266623896fb service nova] Expecting reply to msg 00594839751f49efb972e71e71a64860 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 958.304122] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 00594839751f49efb972e71e71a64860 [ 958.310914] env[62096]: DEBUG nova.network.neutron [-] [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 958.311362] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 666886b3cae046abb0d6d76e5f2933cc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 958.318140] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 666886b3cae046abb0d6d76e5f2933cc [ 958.368877] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b84c6af-64f5-44e9-b2e2-94bb660326d1 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.371304] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cd4b1a8-7025-447d-878d-527f3b236386 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.399952] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] [instance: db3c7e13-12d9-466d-9823-a0a107949588] Skipping network cache update for instance because it is Building. {{(pid=62096) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 958.400121] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] Skipping network cache update for instance because it is Building. {{(pid=62096) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 958.402618] env[62096]: DEBUG nova.network.neutron [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 958.403088] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 1e0749469e4e483897eb794eaaa722f9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 958.404940] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d2ecfe7-8c9e-4045-b994-d8a9343424b2 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.412070] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ea74411-5b53-4e2a-b203-04fe837ae822 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.416760] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1e0749469e4e483897eb794eaaa722f9 [ 958.425057] env[62096]: DEBUG nova.compute.provider_tree [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 958.425516] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg 
2ce10099fa7549f7890e7fad2793d822 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 958.432043] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2ce10099fa7549f7890e7fad2793d822 [ 958.434467] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Acquiring lock "refresh_cache-dc451ce2-65de-4497-ad48-fd776f73cb80" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 958.434604] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Acquired lock "refresh_cache-dc451ce2-65de-4497-ad48-fd776f73cb80" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.434746] env[62096]: DEBUG nova.network.neutron [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] [instance: dc451ce2-65de-4497-ad48-fd776f73cb80] Forcefully refreshing network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 958.434899] env[62096]: DEBUG nova.objects.instance [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Lazy-loading 'info_cache' on Instance uuid dc451ce2-65de-4497-ad48-fd776f73cb80 {{(pid=62096) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 958.435432] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 9323e2e861694df19b4e91938215231d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 958.444708] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9323e2e861694df19b4e91938215231d [ 958.812294] env[62096]: DEBUG nova.network.neutron [req-d54273df-250a-4d25-a322-587feff26b59 req-136d7d76-dd8a-4966-a82b-e266623896fb service nova] [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 958.813929] env[62096]: DEBUG nova.network.neutron [-] [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 958.814344] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 77b06b8ed2a54220a588ba8edd30129f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 958.822082] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 77b06b8ed2a54220a588ba8edd30129f [ 958.878708] env[62096]: DEBUG nova.network.neutron [req-d54273df-250a-4d25-a322-587feff26b59 req-136d7d76-dd8a-4966-a82b-e266623896fb service nova] [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 958.879207] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-d54273df-250a-4d25-a322-587feff26b59 req-136d7d76-dd8a-4966-a82b-e266623896fb service nova] Expecting reply to msg ceefa668a0d249d3ad8c40d1388df429 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 958.887244] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ceefa668a0d249d3ad8c40d1388df429 [ 958.908228] env[62096]: DEBUG oslo_concurrency.lockutils [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Releasing lock "refresh_cache-9968505a-c4cf-4ed5-9bec-0853d1d2840c" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 958.908449] env[62096]: DEBUG nova.compute.manager [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 958.908843] env[62096]: DEBUG nova.compute.manager [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 958.908843] env[62096]: DEBUG nova.network.neutron [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 958.921829] env[62096]: DEBUG nova.network.neutron [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 958.922371] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 786b643b110c4f73838c41db8a35bcb8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 958.927373] env[62096]: DEBUG nova.scheduler.client.report [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 958.929919] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg 948ecf457ab94f10801e0fa383b349af in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 958.933151] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 786b643b110c4f73838c41db8a35bcb8 [ 958.937720] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 9b7c5225b35e4d89a801647bfa7c129f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 958.941323] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 948ecf457ab94f10801e0fa383b349af [ 958.943442] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9b7c5225b35e4d89a801647bfa7c129f [ 959.316741] env[62096]: INFO nova.compute.manager [-] [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] Took 1.02 seconds to deallocate network for instance. 
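The scheduler report-client entries above repeat the compute node's inventory (VCPU total 48 at allocation_ratio 4.0, MEMORY_MB total 196590 with 512 reserved, DISK_GB total 400). Placement turns such an inventory into usable capacity as (total - reserved) * allocation_ratio; the short sketch below just applies that formula to the numbers from the log (the helper itself is illustrative, not Placement code).

    # Hedged sketch: effective capacity per resource class from the inventory
    # dict logged above; capacity = (total - reserved) * allocation_ratio.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def capacity(inv: dict) -> float:
        return (inv['total'] - inv['reserved']) * inv['allocation_ratio']

    for rc, inv in inventory.items():
        print(rc, capacity(inv))
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0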
[ 959.319031] env[62096]: DEBUG nova.compute.claims [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 959.319203] env[62096]: DEBUG oslo_concurrency.lockutils [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.381534] env[62096]: DEBUG oslo_concurrency.lockutils [req-d54273df-250a-4d25-a322-587feff26b59 req-136d7d76-dd8a-4966-a82b-e266623896fb service nova] Releasing lock "refresh_cache-8a9e2396-2585-4fb8-9f92-76d4d1578a95" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 959.381745] env[62096]: DEBUG nova.compute.manager [req-d54273df-250a-4d25-a322-587feff26b59 req-136d7d76-dd8a-4966-a82b-e266623896fb service nova] [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] Received event network-vif-deleted-b8cb63f1-3918-4d7e-9445-e474542f97cc {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 959.424600] env[62096]: DEBUG nova.network.neutron [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 959.425138] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg aff815bd240b4bf1af72964447b24020 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 959.433968] env[62096]: DEBUG oslo_concurrency.lockutils [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.674s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 959.436288] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aff815bd240b4bf1af72964447b24020 [ 959.436910] env[62096]: DEBUG oslo_concurrency.lockutils [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.986s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 959.438356] env[62096]: INFO nova.compute.claims [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 959.440012] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd 
tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Expecting reply to msg dd0ea5a500e34129a6bdf4b8d0e73b0f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 959.451903] env[62096]: INFO nova.scheduler.client.report [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Deleted allocations for instance 9f3ce570-878f-48bf-a08c-0387b0556785 [ 959.454494] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg fe285eb4f1a24a56a94e61284d1646c7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 959.456818] env[62096]: DEBUG nova.network.neutron [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] [instance: dc451ce2-65de-4497-ad48-fd776f73cb80] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 959.457098] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg aa5e4785e80c45868771e82dba838fab in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 959.470029] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aa5e4785e80c45868771e82dba838fab [ 959.482899] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dd0ea5a500e34129a6bdf4b8d0e73b0f [ 959.490411] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fe285eb4f1a24a56a94e61284d1646c7 [ 959.927541] env[62096]: INFO nova.compute.manager [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 9968505a-c4cf-4ed5-9bec-0853d1d2840c] Took 1.02 seconds to deallocate network for instance. 
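Every PortBindingFailed message in this log ends with "please check neutron logs for more information"; a quick complement to reading those logs is to read the failing port's binding fields back from Neutron. The sketch below does that with openstacksdk and is purely illustrative: the cloud name 'devstack' and the choice of port id are assumptions, and the port may already be gone, as the network-vif-deleted events above show.

    # Hedged troubleshooting sketch (not taken from the log). Assumes
    # openstacksdk is installed and clouds.yaml has an entry named 'devstack'.
    import openstack

    PORT_ID = 'b8cb63f1-3918-4d7e-9445-e474542f97cc'  # also seen: b59a00a8-...

    conn = openstack.connect(cloud='devstack')
    port = conn.network.find_port(PORT_ID)
    if port is None:
        # The port can already be deleted by the time you look; the log above
        # records network-vif-deleted events for both failing ports.
        print("port not found (already deleted?)")
    else:
        # A vif_type of 'binding_failed' is what makes Nova raise
        # PortBindingFailed for this port.
        print(port.id, port.status, port.binding_vif_type, port.binding_host_id)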
[ 959.929260] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 2d761bc3ecfe47a0bce7c8e958a8d2ad in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 959.946223] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Expecting reply to msg 3ace9184af4143ed91aab2962327066f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 959.953677] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ace9184af4143ed91aab2962327066f [ 959.960748] env[62096]: DEBUG oslo_concurrency.lockutils [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Lock "9f3ce570-878f-48bf-a08c-0387b0556785" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.687s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 959.961078] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-38f13e88-01cf-4f03-bbb4-543be09424f3 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg d47ef2d7f4e444fdbc1a2d261f37df8b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 959.962156] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2d761bc3ecfe47a0bce7c8e958a8d2ad [ 959.974358] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d47ef2d7f4e444fdbc1a2d261f37df8b [ 960.015044] env[62096]: DEBUG nova.network.neutron [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] [instance: dc451ce2-65de-4497-ad48-fd776f73cb80] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.015571] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 0d5712c82210423b9a680286407715bd in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 960.036927] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0d5712c82210423b9a680286407715bd [ 960.433938] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg b56d1dba95214e959829bc9d041f42e3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 960.476647] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b56d1dba95214e959829bc9d041f42e3 [ 960.491180] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg 52bd603f00944b6e81a300baad86291a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 960.500696] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 52bd603f00944b6e81a300baad86291a [ 960.519329] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Releasing lock "refresh_cache-dc451ce2-65de-4497-ad48-fd776f73cb80" {{(pid=62096) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 960.519540] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] [instance: dc451ce2-65de-4497-ad48-fd776f73cb80] Updated the network info_cache for instance {{(pid=62096) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 960.520187] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 960.520432] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 960.520586] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 960.520908] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 960.521399] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 960.521583] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 960.521718] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62096) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10545}} [ 960.521864] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 960.522203] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 9392f39bd79946d08ef5f60fe3794aac in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 960.531961] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9392f39bd79946d08ef5f60fe3794aac [ 960.533696] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec92af8e-f1cd-4dfd-882d-89b26d90ea49 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.542204] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed974c2c-5ffc-4971-87e3-aaac915bd7e3 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.575416] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2e056ec-49b6-477a-9116-debcabac7071 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.585297] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3c41b2a-0de7-4465-93fe-4f50516b1664 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.598555] env[62096]: DEBUG nova.compute.provider_tree [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 960.599044] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Expecting reply to msg 26c054e0212941a6a4d8671ee47e4e16 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 960.606147] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 26c054e0212941a6a4d8671ee47e4e16 [ 960.953814] env[62096]: INFO nova.scheduler.client.report [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Deleted allocations for instance 9968505a-c4cf-4ed5-9bec-0853d1d2840c [ 960.960023] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 46d7a05276e34ec48fac27bead9810ba in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 960.970054] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 46d7a05276e34ec48fac27bead9810ba [ 960.996151] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 
tempest-ServerShowV247Test-791895041-project-member] Acquiring lock "dc451ce2-65de-4497-ad48-fd776f73cb80" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 960.996412] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Lock "dc451ce2-65de-4497-ad48-fd776f73cb80" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 960.996720] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Acquiring lock "dc451ce2-65de-4497-ad48-fd776f73cb80-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 960.996913] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Lock "dc451ce2-65de-4497-ad48-fd776f73cb80-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 960.997074] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Lock "dc451ce2-65de-4497-ad48-fd776f73cb80-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 960.999069] env[62096]: INFO nova.compute.manager [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: dc451ce2-65de-4497-ad48-fd776f73cb80] Terminating instance [ 961.000605] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Acquiring lock "refresh_cache-dc451ce2-65de-4497-ad48-fd776f73cb80" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 961.000758] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Acquired lock "refresh_cache-dc451ce2-65de-4497-ad48-fd776f73cb80" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.000917] env[62096]: DEBUG nova.network.neutron [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: dc451ce2-65de-4497-ad48-fd776f73cb80] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 961.001303] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None 
req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg fb524b9ef35841be908bd3a3a9be8d95 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 961.010646] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fb524b9ef35841be908bd3a3a9be8d95 [ 961.025142] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 961.101720] env[62096]: DEBUG nova.scheduler.client.report [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 961.104212] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Expecting reply to msg 9aeb02be55234260abe1713f1be40fcc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 961.116329] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9aeb02be55234260abe1713f1be40fcc [ 961.461863] env[62096]: DEBUG oslo_concurrency.lockutils [None req-01739955-c4cc-4cd7-9c77-96b1dea4ad7a tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Lock "9968505a-c4cf-4ed5-9bec-0853d1d2840c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.467s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 961.521113] env[62096]: DEBUG nova.network.neutron [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: dc451ce2-65de-4497-ad48-fd776f73cb80] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 961.574700] env[62096]: DEBUG nova.network.neutron [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: dc451ce2-65de-4497-ad48-fd776f73cb80] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 961.575986] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg 3429a9e4870e4b6aa89b071eda97be43 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 961.584974] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3429a9e4870e4b6aa89b071eda97be43 [ 961.607180] env[62096]: DEBUG oslo_concurrency.lockutils [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.170s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 961.607868] env[62096]: DEBUG nova.compute.manager [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] Start building networks asynchronously for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 961.610638] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Expecting reply to msg 7e1542fd013a4ac0b072b042734080ec in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 961.611899] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 4.532s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 961.614571] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 4a51f412aa82445892a9ac5fb78a3f2b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 961.643881] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7e1542fd013a4ac0b072b042734080ec [ 961.656659] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4a51f412aa82445892a9ac5fb78a3f2b [ 962.078953] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Releasing lock "refresh_cache-dc451ce2-65de-4497-ad48-fd776f73cb80" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 962.079348] env[62096]: DEBUG nova.compute.manager [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 
tempest-ServerShowV247Test-791895041-project-member] [instance: dc451ce2-65de-4497-ad48-fd776f73cb80] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 962.079547] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: dc451ce2-65de-4497-ad48-fd776f73cb80] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 962.080498] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55cf6d27-7bb0-41ca-9ebd-40da268249ef {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.088013] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: dc451ce2-65de-4497-ad48-fd776f73cb80] Powering off the VM {{(pid=62096) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 962.088375] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-38c70d67-7682-426d-8eb2-6e258dcf63fd {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.097811] env[62096]: DEBUG oslo_vmware.api [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Waiting for the task: (returnval){ [ 962.097811] env[62096]: value = "task-397490" [ 962.097811] env[62096]: _type = "Task" [ 962.097811] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.106481] env[62096]: DEBUG oslo_vmware.api [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397490, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.118113] env[62096]: DEBUG nova.compute.utils [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 962.118723] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Expecting reply to msg cde14e1cefd440c5a81f248f7350e19c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 962.123166] env[62096]: DEBUG nova.compute.manager [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 962.123399] env[62096]: DEBUG nova.network.neutron [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 962.142549] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cde14e1cefd440c5a81f248f7350e19c [ 962.176987] env[62096]: DEBUG nova.policy [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b71518744a42457d8f6c8e83561ff51f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eea78e6d054846528134f5206d0353a8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 962.227842] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aff6f587-2e70-46df-ac54-35e9ef5bd222 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.237023] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fba3c23a-c8ea-4702-9384-07a582505ff3 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.270919] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0356f4d1-57c5-48b4-ac27-d1b4222efe4a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.277930] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bd14576-50cd-49d1-989d-193b819de5fe {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.290758] env[62096]: DEBUG nova.compute.provider_tree [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 962.291242] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 9f82891c29c940d9bd3360a14af5e961 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 962.297862] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f82891c29c940d9bd3360a14af5e961 [ 962.468583] env[62096]: DEBUG nova.network.neutron [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] Successfully created port: 85a05da5-6b4d-464e-8794-c1cdaa138525 {{(pid=62096) _create_port_minimal 
/opt/stack/nova/nova/network/neutron.py:548}} [ 962.608656] env[62096]: DEBUG oslo_vmware.api [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397490, 'name': PowerOffVM_Task, 'duration_secs': 0.117717} completed successfully. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.608929] env[62096]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: dc451ce2-65de-4497-ad48-fd776f73cb80] Powered off the VM {{(pid=62096) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 962.609094] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: dc451ce2-65de-4497-ad48-fd776f73cb80] Unregistering the VM {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 962.609340] env[62096]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2e632434-e2c0-4bff-8055-32aac49aee43 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.624136] env[62096]: DEBUG nova.compute.manager [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] Start building block device mappings for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 962.627016] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Expecting reply to msg 6a3cf36457e04d2d8bcb1995c7e7be88 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 962.640231] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: dc451ce2-65de-4497-ad48-fd776f73cb80] Unregistered the VM {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 962.640231] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: dc451ce2-65de-4497-ad48-fd776f73cb80] Deleting contents of the VM from datastore datastore2 {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 962.640231] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Deleting the datastore file [datastore2] dc451ce2-65de-4497-ad48-fd776f73cb80 {{(pid=62096) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 962.640231] env[62096]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1e048d9e-38ee-4e7e-9adc-66a1a5476b92 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.643579] env[62096]: DEBUG oslo_vmware.api [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 
tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Waiting for the task: (returnval){ [ 962.643579] env[62096]: value = "task-397492" [ 962.643579] env[62096]: _type = "Task" [ 962.643579] env[62096]: } to complete. {{(pid=62096) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.658363] env[62096]: DEBUG oslo_vmware.api [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397492, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.663942] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6a3cf36457e04d2d8bcb1995c7e7be88 [ 962.695302] env[62096]: DEBUG oslo_concurrency.lockutils [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Acquiring lock "66af0854-df0c-47ae-9f97-c62c55bf6601" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.695548] env[62096]: DEBUG oslo_concurrency.lockutils [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Lock "66af0854-df0c-47ae-9f97-c62c55bf6601" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.696198] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 88f17c1e9c1044549b3ae41f8a738361 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 962.704961] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 88f17c1e9c1044549b3ae41f8a738361 [ 962.793521] env[62096]: DEBUG nova.scheduler.client.report [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 962.796091] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 15621ff481fa4b2abad9c72fbedb3588 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 962.807264] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 15621ff481fa4b2abad9c72fbedb3588 [ 963.081880] env[62096]: DEBUG nova.compute.manager [req-2403274b-e459-4aed-81f7-9d71ae7cc9bf req-8dea125d-259f-4585-9390-117f586b63bb service 
nova] [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] Received event network-changed-85a05da5-6b4d-464e-8794-c1cdaa138525 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 963.082194] env[62096]: DEBUG nova.compute.manager [req-2403274b-e459-4aed-81f7-9d71ae7cc9bf req-8dea125d-259f-4585-9390-117f586b63bb service nova] [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] Refreshing instance network info cache due to event network-changed-85a05da5-6b4d-464e-8794-c1cdaa138525. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 963.082491] env[62096]: DEBUG oslo_concurrency.lockutils [req-2403274b-e459-4aed-81f7-9d71ae7cc9bf req-8dea125d-259f-4585-9390-117f586b63bb service nova] Acquiring lock "refresh_cache-e6ce8ba5-170a-4221-9e41-15580876f28f" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 963.082635] env[62096]: DEBUG oslo_concurrency.lockutils [req-2403274b-e459-4aed-81f7-9d71ae7cc9bf req-8dea125d-259f-4585-9390-117f586b63bb service nova] Acquired lock "refresh_cache-e6ce8ba5-170a-4221-9e41-15580876f28f" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.082820] env[62096]: DEBUG nova.network.neutron [req-2403274b-e459-4aed-81f7-9d71ae7cc9bf req-8dea125d-259f-4585-9390-117f586b63bb service nova] [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] Refreshing network info cache for port 85a05da5-6b4d-464e-8794-c1cdaa138525 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 963.083270] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-2403274b-e459-4aed-81f7-9d71ae7cc9bf req-8dea125d-259f-4585-9390-117f586b63bb service nova] Expecting reply to msg 8dc28b078c23403699d7e948794620c2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 963.089625] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8dc28b078c23403699d7e948794620c2 [ 963.133518] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Expecting reply to msg b3f2cf446cae4c618217cd52fd8ce12a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 963.154220] env[62096]: DEBUG oslo_vmware.api [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Task: {'id': task-397492, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.087032} completed successfully. 
{{(pid=62096) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.154528] env[62096]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Deleted the datastore file {{(pid=62096) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 963.154625] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: dc451ce2-65de-4497-ad48-fd776f73cb80] Deleted contents of the VM from datastore datastore2 {{(pid=62096) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 963.154794] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: dc451ce2-65de-4497-ad48-fd776f73cb80] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 963.154955] env[62096]: INFO nova.compute.manager [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] [instance: dc451ce2-65de-4497-ad48-fd776f73cb80] Took 1.08 seconds to destroy the instance on the hypervisor. [ 963.155211] env[62096]: DEBUG oslo.service.loopingcall [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 963.155405] env[62096]: DEBUG nova.compute.manager [-] [instance: dc451ce2-65de-4497-ad48-fd776f73cb80] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 963.155502] env[62096]: DEBUG nova.network.neutron [-] [instance: dc451ce2-65de-4497-ad48-fd776f73cb80] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 963.172761] env[62096]: DEBUG nova.network.neutron [-] [instance: dc451ce2-65de-4497-ad48-fd776f73cb80] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 963.173289] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 7e69f62a58f347929e79f2a96d81da1d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 963.174909] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b3f2cf446cae4c618217cd52fd8ce12a [ 963.179872] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7e69f62a58f347929e79f2a96d81da1d [ 963.197821] env[62096]: DEBUG nova.compute.manager [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] Starting instance... 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 963.200110] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 2d0175d1aa224f68824c2fc4f8487fa2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 963.229503] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2d0175d1aa224f68824c2fc4f8487fa2 [ 963.259869] env[62096]: ERROR nova.compute.manager [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 85a05da5-6b4d-464e-8794-c1cdaa138525, please check neutron logs for more information. [ 963.259869] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 963.259869] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 963.259869] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 963.259869] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 963.259869] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 963.259869] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 963.259869] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 963.259869] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 963.259869] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 963.259869] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 963.259869] env[62096]: ERROR nova.compute.manager raise self.value [ 963.259869] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 963.259869] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 963.259869] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 963.259869] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 963.260519] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 963.260519] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 963.260519] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 85a05da5-6b4d-464e-8794-c1cdaa138525, please check neutron logs for more information. 
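The PortBindingFailed traceback above bottoms out in _ensure_no_port_binding_failure (nova/network/neutron.py:294). A short, self-contained sketch of that check, reconstructed from the frames and message in the traceback rather than copied from Nova's source (the exception class below is a local stand-in, and the port dict values are taken from the log purely for illustration):

    # Stand-in for nova.exception.PortBindingFailed so the sketch runs on its
    # own; the message format mirrors the log lines above.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)

    def _ensure_no_port_binding_failure(port):
        # Neutron marks a port whose binding could not be completed with
        # binding:vif_type = 'binding_failed'; treating that as fatal is what
        # turns the failed bind into the exception seen in the traceback.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    try:
        _ensure_no_port_binding_failure({
            'id': '85a05da5-6b4d-464e-8794-c1cdaa138525',  # port id from this log
            'binding:vif_type': 'binding_failed',
        })
    except PortBindingFailed as exc:
        print(exc)

The same failure surfaces again later in this burst when the spawn path iterates the still-pending network_info for instance e6ce8ba5-170a-4221-9e41-15580876f28f, which is why the identical port id reappears in the subsequent "Instance failed to spawn" traceback.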
[ 963.260519] env[62096]: ERROR nova.compute.manager [ 963.260519] env[62096]: Traceback (most recent call last): [ 963.260519] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 963.260519] env[62096]: listener.cb(fileno) [ 963.260519] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 963.260519] env[62096]: result = function(*args, **kwargs) [ 963.260519] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 963.260519] env[62096]: return func(*args, **kwargs) [ 963.260519] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 963.260519] env[62096]: raise e [ 963.260519] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 963.260519] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 963.260519] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 963.260519] env[62096]: created_port_ids = self._update_ports_for_instance( [ 963.260519] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 963.260519] env[62096]: with excutils.save_and_reraise_exception(): [ 963.260519] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 963.260519] env[62096]: self.force_reraise() [ 963.260519] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 963.260519] env[62096]: raise self.value [ 963.260519] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 963.260519] env[62096]: updated_port = self._update_port( [ 963.260519] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 963.260519] env[62096]: _ensure_no_port_binding_failure(port) [ 963.260519] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 963.260519] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 963.261437] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 85a05da5-6b4d-464e-8794-c1cdaa138525, please check neutron logs for more information. [ 963.261437] env[62096]: Removing descriptor: 14 [ 963.298256] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.686s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 963.298844] env[62096]: ERROR nova.compute.manager [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: db3c7e13-12d9-466d-9823-a0a107949588] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 356e995c-488b-4cba-bff7-57d3e11bf041, please check neutron logs for more information. 
[ 963.298844] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] Traceback (most recent call last): [ 963.298844] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 963.298844] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] self.driver.spawn(context, instance, image_meta, [ 963.298844] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 963.298844] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] self._vmops.spawn(context, instance, image_meta, injected_files, [ 963.298844] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 963.298844] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] vm_ref = self.build_virtual_machine(instance, [ 963.298844] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 963.298844] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] vif_infos = vmwarevif.get_vif_info(self._session, [ 963.298844] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 963.299246] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] for vif in network_info: [ 963.299246] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 963.299246] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] return self._sync_wrapper(fn, *args, **kwargs) [ 963.299246] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 963.299246] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] self.wait() [ 963.299246] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 963.299246] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] self[:] = self._gt.wait() [ 963.299246] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 963.299246] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] return self._exit_event.wait() [ 963.299246] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 963.299246] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] current.throw(*self._exc) [ 963.299246] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
963.299246] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] result = function(*args, **kwargs) [ 963.299667] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 963.299667] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] return func(*args, **kwargs) [ 963.299667] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 963.299667] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] raise e [ 963.299667] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 963.299667] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] nwinfo = self.network_api.allocate_for_instance( [ 963.299667] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 963.299667] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] created_port_ids = self._update_ports_for_instance( [ 963.299667] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 963.299667] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] with excutils.save_and_reraise_exception(): [ 963.299667] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 963.299667] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] self.force_reraise() [ 963.299667] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 963.300096] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] raise self.value [ 963.300096] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 963.300096] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] updated_port = self._update_port( [ 963.300096] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 963.300096] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] _ensure_no_port_binding_failure(port) [ 963.300096] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 963.300096] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] raise exception.PortBindingFailed(port_id=port['id']) [ 963.300096] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] nova.exception.PortBindingFailed: Binding failed for 
port 356e995c-488b-4cba-bff7-57d3e11bf041, please check neutron logs for more information. [ 963.300096] env[62096]: ERROR nova.compute.manager [instance: db3c7e13-12d9-466d-9823-a0a107949588] [ 963.300096] env[62096]: DEBUG nova.compute.utils [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: db3c7e13-12d9-466d-9823-a0a107949588] Binding failed for port 356e995c-488b-4cba-bff7-57d3e11bf041, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 963.300767] env[62096]: DEBUG oslo_concurrency.lockutils [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 3.982s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 963.302515] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg cf0d91656b8d48e9a5351a88f39ab846 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 963.304240] env[62096]: DEBUG nova.compute.manager [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: db3c7e13-12d9-466d-9823-a0a107949588] Build of instance db3c7e13-12d9-466d-9823-a0a107949588 was re-scheduled: Binding failed for port 356e995c-488b-4cba-bff7-57d3e11bf041, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 963.304676] env[62096]: DEBUG nova.compute.manager [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: db3c7e13-12d9-466d-9823-a0a107949588] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 963.304850] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Acquiring lock "refresh_cache-db3c7e13-12d9-466d-9823-a0a107949588" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 963.304993] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Acquired lock "refresh_cache-db3c7e13-12d9-466d-9823-a0a107949588" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.305167] env[62096]: DEBUG nova.network.neutron [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: db3c7e13-12d9-466d-9823-a0a107949588] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 963.305522] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg a6766f72d3aa4b34bbb10d4eb6068890 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 963.313165] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a6766f72d3aa4b34bbb10d4eb6068890 [ 963.335787] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cf0d91656b8d48e9a5351a88f39ab846 [ 963.600736] env[62096]: DEBUG nova.network.neutron [req-2403274b-e459-4aed-81f7-9d71ae7cc9bf req-8dea125d-259f-4585-9390-117f586b63bb service nova] [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 963.636635] env[62096]: DEBUG nova.compute.manager [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 963.661847] env[62096]: DEBUG nova.virt.hardware [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 963.662095] env[62096]: DEBUG nova.virt.hardware [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 963.662261] env[62096]: DEBUG nova.virt.hardware [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 963.662440] env[62096]: DEBUG nova.virt.hardware [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 963.662580] env[62096]: DEBUG nova.virt.hardware [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 963.662719] env[62096]: DEBUG nova.virt.hardware [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 963.662921] env[62096]: DEBUG nova.virt.hardware [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 963.663069] env[62096]: DEBUG nova.virt.hardware [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 963.663224] env[62096]: DEBUG nova.virt.hardware [None 
req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 963.663462] env[62096]: DEBUG nova.virt.hardware [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 963.663541] env[62096]: DEBUG nova.virt.hardware [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 963.664489] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4197978-47d3-494f-b81a-fd15fe6dbe69 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.672146] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8090e633-777c-40ba-bb77-5d72404bbd79 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.676364] env[62096]: DEBUG nova.network.neutron [req-2403274b-e459-4aed-81f7-9d71ae7cc9bf req-8dea125d-259f-4585-9390-117f586b63bb service nova] [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.676818] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-2403274b-e459-4aed-81f7-9d71ae7cc9bf req-8dea125d-259f-4585-9390-117f586b63bb service nova] Expecting reply to msg e569f64dfb2a4c079f33173dea12b894 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 963.677696] env[62096]: DEBUG nova.network.neutron [-] [instance: dc451ce2-65de-4497-ad48-fd776f73cb80] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.677894] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 9f88a197ee0847afa4fe7c04bc84dc86 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 963.688499] env[62096]: ERROR nova.compute.manager [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 85a05da5-6b4d-464e-8794-c1cdaa138525, please check neutron logs for more information. 
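
The nova.virt.hardware entries above ([ 963.661847] through [ 963.663541]) walk through CPU topology selection for the m1.nano flavor: with no flavor or image limits or preferences, every exact factorization of the single vCPU is allowed, and (sockets=1, cores=1, threads=1) is the only candidate, hence "Got 1 possible topologies". A minimal, non-authoritative sketch of that enumeration step follows, using local stand-ins (VirtCPUTopology, possible_topologies) rather than Nova's real objects:

from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate exact (sockets, cores, threads) factorizations of the vCPU count
    that respect the per-dimension maximums (65536 is the 'no limit' value logged
    above when neither flavor nor image constrains the topology)."""
    topos = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(per_socket, max_cores) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                topos.append(VirtCPUTopology(sockets, cores, threads))
    return topos

# For the 1-vCPU flavor in the log this yields exactly the topology reported:
#   possible_topologies(1) -> [VirtCPUTopology(sockets=1, cores=1, threads=1)]
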
[ 963.688499] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] Traceback (most recent call last): [ 963.688499] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 963.688499] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] yield resources [ 963.688499] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 963.688499] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] self.driver.spawn(context, instance, image_meta, [ 963.688499] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 963.688499] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 963.688499] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 963.688499] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] vm_ref = self.build_virtual_machine(instance, [ 963.688499] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 963.688885] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] vif_infos = vmwarevif.get_vif_info(self._session, [ 963.688885] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 963.688885] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] for vif in network_info: [ 963.688885] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 963.688885] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] return self._sync_wrapper(fn, *args, **kwargs) [ 963.688885] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 963.688885] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] self.wait() [ 963.688885] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 963.688885] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] self[:] = self._gt.wait() [ 963.688885] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 963.688885] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] return self._exit_event.wait() [ 963.688885] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 963.688885] env[62096]: ERROR 
nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] current.throw(*self._exc) [ 963.689249] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 963.689249] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] result = function(*args, **kwargs) [ 963.689249] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 963.689249] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] return func(*args, **kwargs) [ 963.689249] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 963.689249] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] raise e [ 963.689249] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 963.689249] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] nwinfo = self.network_api.allocate_for_instance( [ 963.689249] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 963.689249] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] created_port_ids = self._update_ports_for_instance( [ 963.689249] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 963.689249] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] with excutils.save_and_reraise_exception(): [ 963.689249] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 963.689610] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] self.force_reraise() [ 963.689610] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 963.689610] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] raise self.value [ 963.689610] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 963.689610] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] updated_port = self._update_port( [ 963.689610] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 963.689610] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] _ensure_no_port_binding_failure(port) [ 963.689610] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
963.689610] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] raise exception.PortBindingFailed(port_id=port['id']) [ 963.689610] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] nova.exception.PortBindingFailed: Binding failed for port 85a05da5-6b4d-464e-8794-c1cdaa138525, please check neutron logs for more information. [ 963.689610] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] [ 963.689610] env[62096]: INFO nova.compute.manager [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] Terminating instance [ 963.690507] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e569f64dfb2a4c079f33173dea12b894 [ 963.690919] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f88a197ee0847afa4fe7c04bc84dc86 [ 963.691429] env[62096]: DEBUG oslo_concurrency.lockutils [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Acquiring lock "refresh_cache-e6ce8ba5-170a-4221-9e41-15580876f28f" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 963.722099] env[62096]: DEBUG oslo_concurrency.lockutils [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 963.826163] env[62096]: DEBUG nova.network.neutron [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: db3c7e13-12d9-466d-9823-a0a107949588] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 963.900656] env[62096]: DEBUG nova.network.neutron [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: db3c7e13-12d9-466d-9823-a0a107949588] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.901144] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg d18446e091d046308e2ded88a69eb6ba in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 963.909927] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d18446e091d046308e2ded88a69eb6ba [ 964.037399] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74ffee14-5613-427e-a4e6-3b986955dccf {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.045371] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f728f773-36d5-4429-87fa-5b6857d924b1 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.075120] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53c9e4af-6148-4bcf-a417-985f625c8c1a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.081973] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d442182c-8144-4936-b5d0-8795ce59cdec {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.094426] env[62096]: DEBUG nova.compute.provider_tree [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 964.094919] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 2897be27bbc1402c8166bf181fe00bc4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 964.101934] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2897be27bbc1402c8166bf181fe00bc4 [ 964.179755] env[62096]: DEBUG oslo_concurrency.lockutils [req-2403274b-e459-4aed-81f7-9d71ae7cc9bf req-8dea125d-259f-4585-9390-117f586b63bb service nova] Releasing lock "refresh_cache-e6ce8ba5-170a-4221-9e41-15580876f28f" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 964.180214] env[62096]: INFO 
nova.compute.manager [-] [instance: dc451ce2-65de-4497-ad48-fd776f73cb80] Took 1.02 seconds to deallocate network for instance. [ 964.180486] env[62096]: DEBUG oslo_concurrency.lockutils [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Acquired lock "refresh_cache-e6ce8ba5-170a-4221-9e41-15580876f28f" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 964.180655] env[62096]: DEBUG nova.network.neutron [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 964.181058] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Expecting reply to msg 712232eee14e47ddb8277ec0acb5874d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 964.184907] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg 16a986d3f46542f3819e4258259860f7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 964.187923] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 712232eee14e47ddb8277ec0acb5874d [ 964.215444] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 16a986d3f46542f3819e4258259860f7 [ 964.403640] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Releasing lock "refresh_cache-db3c7e13-12d9-466d-9823-a0a107949588" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 964.403894] env[62096]: DEBUG nova.compute.manager [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 964.404138] env[62096]: DEBUG nova.compute.manager [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: db3c7e13-12d9-466d-9823-a0a107949588] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 964.404317] env[62096]: DEBUG nova.network.neutron [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: db3c7e13-12d9-466d-9823-a0a107949588] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 964.420513] env[62096]: DEBUG nova.network.neutron [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: db3c7e13-12d9-466d-9823-a0a107949588] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 964.421122] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 6dbd523ad4974e06b9d0097da4b31953 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 964.427318] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6dbd523ad4974e06b9d0097da4b31953 [ 964.614079] env[62096]: ERROR nova.scheduler.client.report [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [req-75389f11-1251-4c71-b396-f2d6ed5c01a9] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 6eefe13c-ab55-4c03-987f-47a62756c3b3. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-75389f11-1251-4c71-b396-f2d6ed5c01a9"}]}: nova.exception.PortBindingFailed: Binding failed for port b8cb63f1-3918-4d7e-9445-e474542f97cc, please check neutron logs for more information. [ 964.628626] env[62096]: DEBUG nova.scheduler.client.report [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Refreshing inventories for resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 964.640185] env[62096]: DEBUG nova.scheduler.client.report [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Updating ProviderTree inventory for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 964.640387] env[62096]: DEBUG nova.compute.provider_tree [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 
964.649567] env[62096]: DEBUG nova.scheduler.client.report [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Refreshing aggregate associations for resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3, aggregates: None {{(pid=62096) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 964.665160] env[62096]: DEBUG nova.scheduler.client.report [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Refreshing trait associations for resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62096) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 964.688467] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 964.701234] env[62096]: DEBUG nova.network.neutron [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 964.729015] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f0df7e4-06fa-4b7f-9732-1abfb2171172 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.736135] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa8a4a01-a1bd-4c11-8b9d-224185ac84cd {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.768388] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28ad56d9-8dcf-4045-8a38-7ca61fe9e1f4 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.775513] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c6a7369-4ab8-47ec-aab8-74d3c0333c1f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.791563] env[62096]: DEBUG nova.compute.provider_tree [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 964.792040] 
env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 042e3e4382f041ab91c83d69cc442caf in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 964.805280] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 042e3e4382f041ab91c83d69cc442caf [ 964.810007] env[62096]: DEBUG nova.network.neutron [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 964.810595] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Expecting reply to msg f90865e062b542669cbede0f9369b91e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 964.818397] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f90865e062b542669cbede0f9369b91e [ 964.923208] env[62096]: DEBUG nova.network.neutron [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: db3c7e13-12d9-466d-9823-a0a107949588] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 964.923814] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg af41a61541a24a50b2e4a6f41b1584a1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 964.931893] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg af41a61541a24a50b2e4a6f41b1584a1 [ 965.103331] env[62096]: DEBUG nova.compute.manager [req-f86bd8a3-303d-423b-bc75-11f11d1ffdad req-941e6b16-14b1-4579-863f-98531ba22fc5 service nova] [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] Received event network-vif-deleted-85a05da5-6b4d-464e-8794-c1cdaa138525 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 965.312845] env[62096]: DEBUG oslo_concurrency.lockutils [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Releasing lock "refresh_cache-e6ce8ba5-170a-4221-9e41-15580876f28f" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 965.313265] env[62096]: DEBUG nova.compute.manager [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 965.313452] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 965.313739] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f6374f3a-7a8b-4612-9ee6-d2149e1f45cf {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.323108] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-981d240a-7fc7-4e5b-bd49-920a18ded60c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.333817] env[62096]: DEBUG nova.scheduler.client.report [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Updated inventory for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with generation 111 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:957}} [ 965.334043] env[62096]: DEBUG nova.compute.provider_tree [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Updating resource provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 generation from 111 to 112 during operation: update_inventory {{(pid=62096) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 965.334242] env[62096]: DEBUG nova.compute.provider_tree [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Updating inventory in ProviderTree for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 965.336714] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 56002473c9284499b18bfa4014e4e267 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 965.349097] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] Instance does not exist on backend: 
nova.exception.InstanceNotFound: Instance e6ce8ba5-170a-4221-9e41-15580876f28f could not be found. [ 965.349303] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 965.349480] env[62096]: INFO nova.compute.manager [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 965.349722] env[62096]: DEBUG oslo.service.loopingcall [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 965.350337] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 56002473c9284499b18bfa4014e4e267 [ 965.350707] env[62096]: DEBUG nova.compute.manager [-] [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 965.350804] env[62096]: DEBUG nova.network.neutron [-] [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 965.364704] env[62096]: DEBUG nova.network.neutron [-] [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 965.365189] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 53e34658afa84320a6c1211704f06510 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 965.371366] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 53e34658afa84320a6c1211704f06510 [ 965.426088] env[62096]: INFO nova.compute.manager [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: db3c7e13-12d9-466d-9823-a0a107949588] Took 1.02 seconds to deallocate network for instance. 
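
The 409 "placement.concurrent_update" earlier in this run ([ 964.614079]) is the expected outcome of placement's generation-based optimistic concurrency: the PUT carried a stale resource provider generation, so the report client refreshed the provider's inventories, aggregates and traits and retried, eventually landing the update and bumping the generation from 111 to 112 ([ 965.333817] onward). A rough sketch of that retry loop, assuming a hypothetical requests-style client already authenticated against the placement endpoint (this is not Nova's actual SchedulerReportClient):

import time

def set_inventory(http, provider_uuid, inventories, attempts=4):
    """PUT the desired inventory, re-reading the provider generation on conflict."""
    url = f"/resource_providers/{provider_uuid}/inventories"
    for attempt in range(attempts):
        # Re-read the provider so the PUT carries the latest generation.
        current = http.get(url).json()
        body = {
            "resource_provider_generation": current["resource_provider_generation"],
            "inventories": inventories,
        }
        resp = http.put(url, json=body)
        if resp.status_code == 200:
            # Placement bumps the generation on success (111 -> 112 in the log).
            return resp.json()
        if resp.status_code == 409:
            # Another writer updated the provider between our GET and PUT;
            # back off briefly and retry with a fresh generation.
            time.sleep(0.1 * (attempt + 1))
            continue
        resp.raise_for_status()
    raise RuntimeError(f"could not update inventory for provider {provider_uuid}")
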
[ 965.427911] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg a1fbc2fd9e3e4e32b77281c37cd9f878 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 965.458568] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a1fbc2fd9e3e4e32b77281c37cd9f878 [ 965.839361] env[62096]: DEBUG oslo_concurrency.lockutils [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.538s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 965.839998] env[62096]: ERROR nova.compute.manager [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b8cb63f1-3918-4d7e-9445-e474542f97cc, please check neutron logs for more information. [ 965.839998] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] Traceback (most recent call last): [ 965.839998] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 965.839998] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] self.driver.spawn(context, instance, image_meta, [ 965.839998] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 965.839998] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] self._vmops.spawn(context, instance, image_meta, injected_files, [ 965.839998] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 965.839998] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] vm_ref = self.build_virtual_machine(instance, [ 965.839998] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 965.839998] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] vif_infos = vmwarevif.get_vif_info(self._session, [ 965.839998] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 965.840386] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] for vif in network_info: [ 965.840386] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 965.840386] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] return self._sync_wrapper(fn, *args, **kwargs) [ 965.840386] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] File "/opt/stack/nova/nova/network/model.py", 
line 603, in _sync_wrapper [ 965.840386] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] self.wait() [ 965.840386] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 965.840386] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] self[:] = self._gt.wait() [ 965.840386] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 965.840386] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] return self._exit_event.wait() [ 965.840386] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 965.840386] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] current.throw(*self._exc) [ 965.840386] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 965.840386] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] result = function(*args, **kwargs) [ 965.840819] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 965.840819] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] return func(*args, **kwargs) [ 965.840819] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 965.840819] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] raise e [ 965.840819] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 965.840819] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] nwinfo = self.network_api.allocate_for_instance( [ 965.840819] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 965.840819] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] created_port_ids = self._update_ports_for_instance( [ 965.840819] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 965.840819] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] with excutils.save_and_reraise_exception(): [ 965.840819] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 965.840819] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] self.force_reraise() [ 965.840819] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 965.841248] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] raise self.value [ 965.841248] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 965.841248] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] updated_port = self._update_port( [ 965.841248] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 965.841248] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] _ensure_no_port_binding_failure(port) [ 965.841248] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 965.841248] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] raise exception.PortBindingFailed(port_id=port['id']) [ 965.841248] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] nova.exception.PortBindingFailed: Binding failed for port b8cb63f1-3918-4d7e-9445-e474542f97cc, please check neutron logs for more information. [ 965.841248] env[62096]: ERROR nova.compute.manager [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] [ 965.841248] env[62096]: DEBUG nova.compute.utils [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] Binding failed for port b8cb63f1-3918-4d7e-9445-e474542f97cc, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 965.842695] env[62096]: DEBUG nova.compute.manager [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] Build of instance 8a9e2396-2585-4fb8-9f92-76d4d1578a95 was re-scheduled: Binding failed for port b8cb63f1-3918-4d7e-9445-e474542f97cc, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 965.843095] env[62096]: DEBUG nova.compute.manager [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 965.843385] env[62096]: DEBUG oslo_concurrency.lockutils [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Acquiring lock "refresh_cache-8a9e2396-2585-4fb8-9f92-76d4d1578a95" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 965.843540] env[62096]: DEBUG oslo_concurrency.lockutils [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Acquired lock "refresh_cache-8a9e2396-2585-4fb8-9f92-76d4d1578a95" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.843707] env[62096]: DEBUG nova.network.neutron [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 965.844183] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 338b014fac074760afb627f7f57498da in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 965.845341] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 4.820s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 965.845611] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 965.845785] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62096) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 965.846058] env[62096]: DEBUG oslo_concurrency.lockutils [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.124s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 965.847706] env[62096]: INFO nova.compute.claims [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 
tempest-DeleteServersTestJSON-654204352-project-member] [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 965.849107] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg bfc330b6e0a844bca24b3fd14a319948 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 965.850910] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6d3a218-a3a8-467a-b90f-8294b016a0bc {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.854375] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 338b014fac074760afb627f7f57498da [ 965.861331] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bc1fc84-7dfb-4e85-8f3f-0b1946735801 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.866646] env[62096]: DEBUG nova.network.neutron [-] [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 965.867012] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 44230c79721f4c6296b09016ca0da530 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 965.877805] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 44230c79721f4c6296b09016ca0da530 [ 965.878914] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0221b94f-258b-46a6-9134-cf9f5904b00c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.885633] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bfc330b6e0a844bca24b3fd14a319948 [ 965.886679] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb105b21-4b4d-43b8-9210-87ed7a91c667 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.915350] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181777MB free_disk=127GB free_vcpus=48 pci_devices=None {{(pid=62096) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 965.915487] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 965.932459] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg d9ad982ededf4cc898e0a09e4f6cf816 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 965.960325] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d9ad982ededf4cc898e0a09e4f6cf816 [ 
966.357122] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 8a7a12c648d34ffc85a006511802bf36 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 966.361182] env[62096]: DEBUG nova.network.neutron [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 966.364517] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8a7a12c648d34ffc85a006511802bf36 [ 966.368605] env[62096]: INFO nova.compute.manager [-] [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] Took 1.02 seconds to deallocate network for instance. [ 966.371589] env[62096]: DEBUG nova.compute.claims [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 966.371589] env[62096]: DEBUG oslo_concurrency.lockutils [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 966.431158] env[62096]: DEBUG nova.network.neutron [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 966.431762] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 4bb8ab3a60be4afa95088c9996255de3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 966.441416] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4bb8ab3a60be4afa95088c9996255de3 [ 966.451657] env[62096]: INFO nova.scheduler.client.report [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Deleted allocations for instance db3c7e13-12d9-466d-9823-a0a107949588 [ 966.458026] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 53cbb5c16f6b495ebd18806b7cd559e3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 966.469796] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 53cbb5c16f6b495ebd18806b7cd559e3 [ 966.914915] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92928c85-35ed-4dab-9bdb-d6b494ec109e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.922347] 
env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdcd25dc-6bda-4c0c-aac8-f5e4ceb81d0b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.952229] env[62096]: DEBUG oslo_concurrency.lockutils [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Releasing lock "refresh_cache-8a9e2396-2585-4fb8-9f92-76d4d1578a95" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 966.952466] env[62096]: DEBUG nova.compute.manager [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 966.952645] env[62096]: DEBUG nova.compute.manager [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 966.952811] env[62096]: DEBUG nova.network.neutron [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 966.954929] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-671af1f3-e1c7-4d5f-9a42-d327c8194e5e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.961085] env[62096]: DEBUG oslo_concurrency.lockutils [None req-cf26b410-85fb-4494-a97e-7e8a698de974 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Lock "db3c7e13-12d9-466d-9823-a0a107949588" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.617s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 966.962249] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47bac423-5c19-428d-aba7-f6e6f11c41ee {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.974637] env[62096]: DEBUG nova.compute.provider_tree [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 966.975160] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 859bcbf756c0405d8c60073aeaeb31a4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 966.976545] env[62096]: DEBUG nova.network.neutron [None 
req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 966.977035] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 281c792e83de44ab97186e80975c583b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 966.982945] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 281c792e83de44ab97186e80975c583b [ 967.005701] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 859bcbf756c0405d8c60073aeaeb31a4 [ 967.479110] env[62096]: DEBUG nova.scheduler.client.report [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 967.481453] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 8a0fa88d1e0649dca4937d813854c9bb in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 967.482479] env[62096]: DEBUG nova.network.neutron [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.482871] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg ea4b18a88ab4480b90180d33d4f703e7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 967.490063] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ea4b18a88ab4480b90180d33d4f703e7 [ 967.493930] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8a0fa88d1e0649dca4937d813854c9bb [ 967.985033] env[62096]: DEBUG oslo_concurrency.lockutils [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.139s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 967.985578] env[62096]: DEBUG nova.compute.manager [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 
tempest-DeleteServersTestJSON-654204352-project-member] [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] Start building networks asynchronously for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 967.987333] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 730cd8647d54469bb4a79034bb52798c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 967.996839] env[62096]: INFO nova.compute.manager [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: 8a9e2396-2585-4fb8-9f92-76d4d1578a95] Took 1.04 seconds to deallocate network for instance. [ 967.996839] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg f18daa969b364e5ab70c081a31c775f0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 967.996839] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.303s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 967.996839] env[62096]: DEBUG nova.objects.instance [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Lazy-loading 'resources' on Instance uuid dc451ce2-65de-4497-ad48-fd776f73cb80 {{(pid=62096) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 967.996839] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg 8eda7878c9ec4628aad07ded45e4d3e1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 968.015484] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8eda7878c9ec4628aad07ded45e4d3e1 [ 968.034937] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f18daa969b364e5ab70c081a31c775f0 [ 968.036907] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 730cd8647d54469bb4a79034bb52798c [ 968.190376] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Acquiring lock "9ee34e47-6090-4617-a08c-0154874d4889" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 968.190656] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Lock "9ee34e47-6090-4617-a08c-0154874d4889" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} 
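[editor's note] The "Acquiring lock" / "acquired ... waited" / "released ... held" triplets above (e.g. the 3.303s wait and 2.139s hold on "compute_resources") are oslo.concurrency's lock wrapper serializing the resource tracker. A minimal sketch of the same pattern, assuming oslo.concurrency is installed; claim_resources() is a placeholder name, not Nova code:

    # Sketch of the lock pattern behind the "acquired/waited/held" log lines.
    import time

    from oslo_concurrency import lockutils


    @lockutils.synchronized("compute_resources")
    def claim_resources(instance_uuid):
        # The body runs only while "compute_resources" is held, which is why
        # concurrent builds show long "waited" times in the log above.
        time.sleep(0.1)  # stand-in for claim/abort bookkeeping
        return instance_uuid


    if __name__ == "__main__":
        start = time.monotonic()
        claim_resources("66af0854-df0c-47ae-9f97-c62c55bf6601")
        print("held %.3fs" % (time.monotonic() - start))

The decorator serializes callers on an in-process lock by name, so every "Acquiring lock" line that waits corresponds to another greenthread currently inside a claim, abort, or periodic update.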
[ 968.191137] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 8cf119f671f54d53b10c563406174d2c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 968.200951] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8cf119f671f54d53b10c563406174d2c [ 968.601502] env[62096]: DEBUG nova.compute.utils [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 968.601502] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg dc079739dba44e9abba9a1a749bf6e59 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 968.601502] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg ecf0fc89336848b6b5ff50da6d581dd4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 968.601502] env[62096]: DEBUG nova.compute.manager [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 968.601502] env[62096]: DEBUG nova.network.neutron [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 968.601502] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dc079739dba44e9abba9a1a749bf6e59 [ 968.601502] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ecf0fc89336848b6b5ff50da6d581dd4 [ 968.602984] env[62096]: DEBUG nova.policy [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '491a2069427f43e79347a70e475e4dd2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e45573130e8e4ce4979b37e1b4c5af9f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 968.615381] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2014abf-e020-4026-8236-2c05aa9ba1e8 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.622367] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1e1a3f0-3072-4afe-aa1f-3c2acafc8f99 {{(pid=62096) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.651417] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a984c5aa-8962-41bf-8b28-633a48da2f13 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.661057] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a376167-5027-42ba-8b14-a81e31cd1e8f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.674137] env[62096]: DEBUG nova.compute.provider_tree [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 968.674630] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg d94db49cf8074300839a052aa89a63a7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 968.681991] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d94db49cf8074300839a052aa89a63a7 [ 968.692473] env[62096]: DEBUG nova.compute.manager [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 9ee34e47-6090-4617-a08c-0154874d4889] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 968.694159] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg f3f42ceed6a341e29179a31ed8355661 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 968.725528] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f3f42ceed6a341e29179a31ed8355661 [ 968.851445] env[62096]: DEBUG nova.network.neutron [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] Successfully created port: 533d8874-f924-49cc-ad4e-ba2d43bd335e {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 969.006418] env[62096]: DEBUG nova.compute.manager [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 969.008154] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg f7bb52bcd20942c59af0f56d6f47e14a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 969.038623] env[62096]: INFO nova.scheduler.client.report [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Deleted allocations for instance 8a9e2396-2585-4fb8-9f92-76d4d1578a95 [ 969.044646] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 07358a37be814ae6b6be6b661789058f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 969.051820] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f7bb52bcd20942c59af0f56d6f47e14a [ 969.063458] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 07358a37be814ae6b6be6b661789058f [ 969.177512] env[62096]: DEBUG nova.scheduler.client.report [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 969.179820] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg f87fe73caec64125a19c673be794f586 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 969.191723] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f87fe73caec64125a19c673be794f586 [ 969.209476] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 969.459760] env[62096]: DEBUG nova.compute.manager [req-55084ab0-1757-4d9d-b580-194b680e5c36 req-e2372cfc-7ac0-4715-85da-1d89b80ece9f service nova] [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] Received event network-changed-533d8874-f924-49cc-ad4e-ba2d43bd335e {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 969.459985] env[62096]: DEBUG nova.compute.manager [req-55084ab0-1757-4d9d-b580-194b680e5c36 req-e2372cfc-7ac0-4715-85da-1d89b80ece9f service nova] [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] Refreshing instance network info cache due to event network-changed-533d8874-f924-49cc-ad4e-ba2d43bd335e. 
{{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 969.460191] env[62096]: DEBUG oslo_concurrency.lockutils [req-55084ab0-1757-4d9d-b580-194b680e5c36 req-e2372cfc-7ac0-4715-85da-1d89b80ece9f service nova] Acquiring lock "refresh_cache-66af0854-df0c-47ae-9f97-c62c55bf6601" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 969.460312] env[62096]: DEBUG oslo_concurrency.lockutils [req-55084ab0-1757-4d9d-b580-194b680e5c36 req-e2372cfc-7ac0-4715-85da-1d89b80ece9f service nova] Acquired lock "refresh_cache-66af0854-df0c-47ae-9f97-c62c55bf6601" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.460473] env[62096]: DEBUG nova.network.neutron [req-55084ab0-1757-4d9d-b580-194b680e5c36 req-e2372cfc-7ac0-4715-85da-1d89b80ece9f service nova] [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] Refreshing network info cache for port 533d8874-f924-49cc-ad4e-ba2d43bd335e {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 969.460890] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-55084ab0-1757-4d9d-b580-194b680e5c36 req-e2372cfc-7ac0-4715-85da-1d89b80ece9f service nova] Expecting reply to msg ef72d76b07b2447986b0b2a8519cb193 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 969.467712] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ef72d76b07b2447986b0b2a8519cb193 [ 969.525468] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg fb729b4eb4eb456fa8d4bd66250eaabe in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 969.549815] env[62096]: DEBUG oslo_concurrency.lockutils [None req-2bf30cf6-b5fd-47ba-bb40-34c6f1f16b6f tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Lock "8a9e2396-2585-4fb8-9f92-76d4d1578a95" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.172s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 969.553539] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fb729b4eb4eb456fa8d4bd66250eaabe [ 969.647439] env[62096]: ERROR nova.compute.manager [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 533d8874-f924-49cc-ad4e-ba2d43bd335e, please check neutron logs for more information. 
[ 969.647439] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 969.647439] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 969.647439] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 969.647439] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 969.647439] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 969.647439] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 969.647439] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 969.647439] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 969.647439] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 969.647439] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 969.647439] env[62096]: ERROR nova.compute.manager raise self.value [ 969.647439] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 969.647439] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 969.647439] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 969.647439] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 969.648024] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 969.648024] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 969.648024] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 533d8874-f924-49cc-ad4e-ba2d43bd335e, please check neutron logs for more information. 
[ 969.648024] env[62096]: ERROR nova.compute.manager [ 969.648024] env[62096]: Traceback (most recent call last): [ 969.648024] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 969.648024] env[62096]: listener.cb(fileno) [ 969.648024] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 969.648024] env[62096]: result = function(*args, **kwargs) [ 969.648024] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 969.648024] env[62096]: return func(*args, **kwargs) [ 969.648024] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 969.648024] env[62096]: raise e [ 969.648024] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 969.648024] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 969.648024] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 969.648024] env[62096]: created_port_ids = self._update_ports_for_instance( [ 969.648024] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 969.648024] env[62096]: with excutils.save_and_reraise_exception(): [ 969.648024] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 969.648024] env[62096]: self.force_reraise() [ 969.648024] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 969.648024] env[62096]: raise self.value [ 969.648024] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 969.648024] env[62096]: updated_port = self._update_port( [ 969.648024] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 969.648024] env[62096]: _ensure_no_port_binding_failure(port) [ 969.648024] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 969.648024] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 969.648954] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 533d8874-f924-49cc-ad4e-ba2d43bd335e, please check neutron logs for more information. 
[ 969.648954] env[62096]: Removing descriptor: 14 [ 969.682676] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.692s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 969.684932] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 3.769s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 969.686062] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 7358273744804d778fd0a97a1a678d5a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 969.698090] env[62096]: INFO nova.scheduler.client.report [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Deleted allocations for instance dc451ce2-65de-4497-ad48-fd776f73cb80 [ 969.701234] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg 8e6396f1dc8d43a5bc382acebaf64f2e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 969.708297] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7358273744804d778fd0a97a1a678d5a [ 969.737495] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8e6396f1dc8d43a5bc382acebaf64f2e [ 969.977613] env[62096]: DEBUG nova.network.neutron [req-55084ab0-1757-4d9d-b580-194b680e5c36 req-e2372cfc-7ac0-4715-85da-1d89b80ece9f service nova] [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 970.028533] env[62096]: DEBUG nova.compute.manager [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 970.048383] env[62096]: DEBUG nova.network.neutron [req-55084ab0-1757-4d9d-b580-194b680e5c36 req-e2372cfc-7ac0-4715-85da-1d89b80ece9f service nova] [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.048717] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-55084ab0-1757-4d9d-b580-194b680e5c36 req-e2372cfc-7ac0-4715-85da-1d89b80ece9f service nova] Expecting reply to msg 4269ec7ce38f41f687189cb49b2149f7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 970.053075] env[62096]: DEBUG nova.virt.hardware [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 970.053298] env[62096]: DEBUG nova.virt.hardware [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 970.053456] env[62096]: DEBUG nova.virt.hardware [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 970.053632] env[62096]: DEBUG nova.virt.hardware [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 970.053772] env[62096]: DEBUG nova.virt.hardware [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 970.053913] env[62096]: DEBUG nova.virt.hardware [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 970.054179] env[62096]: DEBUG nova.virt.hardware [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Topology 
preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 970.054345] env[62096]: DEBUG nova.virt.hardware [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 970.054511] env[62096]: DEBUG nova.virt.hardware [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 970.054666] env[62096]: DEBUG nova.virt.hardware [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 970.054829] env[62096]: DEBUG nova.virt.hardware [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 970.055702] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a058b684-40f7-4c63-9ae6-944c98d680ff {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.058940] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4269ec7ce38f41f687189cb49b2149f7 [ 970.065058] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34ac8ffe-faa5-4eb5-bba3-03fb5bbe875b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.079530] env[62096]: ERROR nova.compute.manager [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 533d8874-f924-49cc-ad4e-ba2d43bd335e, please check neutron logs for more information. 
[ 970.079530] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] Traceback (most recent call last): [ 970.079530] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 970.079530] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] yield resources [ 970.079530] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 970.079530] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] self.driver.spawn(context, instance, image_meta, [ 970.079530] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 970.079530] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] self._vmops.spawn(context, instance, image_meta, injected_files, [ 970.079530] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 970.079530] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] vm_ref = self.build_virtual_machine(instance, [ 970.079530] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 970.079975] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] vif_infos = vmwarevif.get_vif_info(self._session, [ 970.079975] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 970.079975] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] for vif in network_info: [ 970.079975] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 970.079975] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] return self._sync_wrapper(fn, *args, **kwargs) [ 970.079975] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 970.079975] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] self.wait() [ 970.079975] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 970.079975] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] self[:] = self._gt.wait() [ 970.079975] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 970.079975] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] return self._exit_event.wait() [ 970.079975] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 970.079975] env[62096]: ERROR 
nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] current.throw(*self._exc) [ 970.080389] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 970.080389] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] result = function(*args, **kwargs) [ 970.080389] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 970.080389] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] return func(*args, **kwargs) [ 970.080389] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 970.080389] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] raise e [ 970.080389] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 970.080389] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] nwinfo = self.network_api.allocate_for_instance( [ 970.080389] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 970.080389] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] created_port_ids = self._update_ports_for_instance( [ 970.080389] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 970.080389] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] with excutils.save_and_reraise_exception(): [ 970.080389] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 970.080830] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] self.force_reraise() [ 970.080830] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 970.080830] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] raise self.value [ 970.080830] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 970.080830] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] updated_port = self._update_port( [ 970.080830] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 970.080830] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] _ensure_no_port_binding_failure(port) [ 970.080830] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
970.080830] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] raise exception.PortBindingFailed(port_id=port['id']) [ 970.080830] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] nova.exception.PortBindingFailed: Binding failed for port 533d8874-f924-49cc-ad4e-ba2d43bd335e, please check neutron logs for more information. [ 970.080830] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] [ 970.080830] env[62096]: INFO nova.compute.manager [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] Terminating instance [ 970.081595] env[62096]: DEBUG oslo_concurrency.lockutils [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Acquiring lock "refresh_cache-66af0854-df0c-47ae-9f97-c62c55bf6601" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 970.190097] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 76c179213600487da09fa836f3e59f09 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 970.201093] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 76c179213600487da09fa836f3e59f09 [ 970.207265] env[62096]: DEBUG oslo_concurrency.lockutils [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Lock "dc451ce2-65de-4497-ad48-fd776f73cb80" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.211s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 970.207608] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-f7e5761e-f8c7-4c8c-8348-b19537e41ea1 tempest-ServerShowV247Test-791895041 tempest-ServerShowV247Test-791895041-project-member] Expecting reply to msg addcb89f22c544d2b4af48f6b6228b90 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 970.224216] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg addcb89f22c544d2b4af48f6b6228b90 [ 970.551364] env[62096]: DEBUG oslo_concurrency.lockutils [req-55084ab0-1757-4d9d-b580-194b680e5c36 req-e2372cfc-7ac0-4715-85da-1d89b80ece9f service nova] Releasing lock "refresh_cache-66af0854-df0c-47ae-9f97-c62c55bf6601" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 970.551758] env[62096]: DEBUG oslo_concurrency.lockutils [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Acquired lock "refresh_cache-66af0854-df0c-47ae-9f97-c62c55bf6601" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 970.552061] env[62096]: DEBUG nova.network.neutron [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 970.552509] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None 
req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 744b3b9aa35f40e8ad5e41904c44d618 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 970.559595] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 744b3b9aa35f40e8ad5e41904c44d618 [ 970.711685] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance e6ce8ba5-170a-4221-9e41-15580876f28f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 970.711878] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 66af0854-df0c-47ae-9f97-c62c55bf6601 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 970.712514] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 8d80507d909b43fb8c5e6029efdae985 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 970.722586] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8d80507d909b43fb8c5e6029efdae985 [ 971.069985] env[62096]: DEBUG nova.network.neutron [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 971.144176] env[62096]: DEBUG nova.network.neutron [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 971.144737] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg ee60f010cd89493d9a1d54bd9117c14b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 971.152958] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ee60f010cd89493d9a1d54bd9117c14b [ 971.214805] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 9ee34e47-6090-4617-a08c-0154874d4889 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 971.215077] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=62096) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 971.215334] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=62096) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 971.276390] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1af0fbe5-07fe-41b3-8b8c-bf74892def6c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.284747] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-556175d9-7f4b-4fa0-a3c2-4870399d7a28 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.315780] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdd26a22-2a2c-4756-81e4-c30ff107161b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.322953] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6994a7d-a68c-4423-b4fe-226e0c112f8a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.335526] env[62096]: DEBUG nova.compute.provider_tree [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 971.335975] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 5c26d7e499034a68be7c5cb86038231c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 971.347192] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5c26d7e499034a68be7c5cb86038231c [ 971.356797] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Acquiring lock "adea4ebf-6b22-4fd2-b601-bad448e4bbc5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 971.357009] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Lock "adea4ebf-6b22-4fd2-b601-bad448e4bbc5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 971.357439] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None 
req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 5dbc256679f84bb6838d8f9c96dd3ffc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 971.366117] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5dbc256679f84bb6838d8f9c96dd3ffc [ 971.484367] env[62096]: DEBUG nova.compute.manager [req-1f8d6a10-0b28-4c0a-90b8-c208bf833de3 req-682176a5-8abb-4be6-8ee2-2e86e410e0ba service nova] [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] Received event network-vif-deleted-533d8874-f924-49cc-ad4e-ba2d43bd335e {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 971.647553] env[62096]: DEBUG oslo_concurrency.lockutils [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Releasing lock "refresh_cache-66af0854-df0c-47ae-9f97-c62c55bf6601" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 971.648303] env[62096]: DEBUG nova.compute.manager [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 971.648543] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 971.648852] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a5c06577-7a78-49eb-98d1-3d2d74517f13 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.657643] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaed4e0e-9727-4823-899a-934fe1f0ed86 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.678100] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 66af0854-df0c-47ae-9f97-c62c55bf6601 could not be found. [ 971.678310] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 971.678484] env[62096]: INFO nova.compute.manager [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] Took 0.03 seconds to destroy the instance on the hypervisor. 
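[editor's note] The two PortBindingFailed tracebacks above both end in Nova's _ensure_no_port_binding_failure helper. A self-contained sketch of that check; the 'binding:vif_type' == 'binding_failed' condition is my reading of the neutron.py helper, not a verbatim copy, and the exception class here is a local stand-in:

    # Sketch of the check that raises PortBindingFailed in the tracebacks above.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs for "
                "more information." % port_id)


    def ensure_no_port_binding_failure(port):
        # Neutron marks a port it could not bind by setting binding:vif_type
        # to 'binding_failed'; Nova turns that into a hard failure instead of
        # spawning a VM with a dead VIF (assumed condition, see note above).
        if port.get("binding:vif_type") == "binding_failed":
            raise PortBindingFailed(port_id=port["id"])


    if __name__ == "__main__":
        port = {"id": "533d8874-f924-49cc-ad4e-ba2d43bd335e",
                "binding:vif_type": "binding_failed"}
        try:
            ensure_no_port_binding_failure(port)
        except PortBindingFailed as exc:
            print(exc)

Because the failure is raised from the async network-allocation greenthread, the same exception surfaces twice: once in _allocate_network_async and again when spawn() iterates the network_info, which is exactly the pair of tracebacks logged for instance 66af0854-df0c-47ae-9f97-c62c55bf6601.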
[ 971.678828] env[62096]: DEBUG oslo.service.loopingcall [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 971.679114] env[62096]: DEBUG nova.compute.manager [-] [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 971.679217] env[62096]: DEBUG nova.network.neutron [-] [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 971.692878] env[62096]: DEBUG nova.network.neutron [-] [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 971.693326] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 53537dbf14254987bbeaee2f0c7d1527 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 971.700116] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 53537dbf14254987bbeaee2f0c7d1527 [ 971.838923] env[62096]: DEBUG nova.scheduler.client.report [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 971.841774] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 25cc3cb445c0420db05bbe06a42e6f85 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 971.853619] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 25cc3cb445c0420db05bbe06a42e6f85 [ 971.859825] env[62096]: DEBUG nova.compute.manager [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] Starting instance... 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 971.861464] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg e7072162b9354cc1a55fdbfe10e72e4c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 971.893030] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e7072162b9354cc1a55fdbfe10e72e4c [ 972.164045] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 42f0dfbc8e14461c8395f193712552d5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 972.172755] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 42f0dfbc8e14461c8395f193712552d5 [ 972.195602] env[62096]: DEBUG nova.network.neutron [-] [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 972.196015] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e7e32ac489864a1488a323217bf780b0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 972.203757] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e7e32ac489864a1488a323217bf780b0 [ 972.344135] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62096) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 972.344345] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.660s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 972.344608] env[62096]: DEBUG oslo_concurrency.lockutils [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 5.973s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.346582] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Expecting reply to msg 12391b81e3c3461d90dc9c812ed78aea in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 972.377053] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.377632] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 12391b81e3c3461d90dc9c812ed78aea [ 972.697978] env[62096]: INFO nova.compute.manager [-] [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] Took 1.02 seconds to deallocate network for instance. 
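[editor's note] The "Inventory has not changed ... based on inventory data" entries above repeat the same per-resource-class dict (VCPU, MEMORY_MB, DISK_GB with total, reserved and allocation_ratio). A small illustrative sketch of how an allocatable capacity falls out of that dict, assuming the usual placement-style rule (total - reserved) * allocation_ratio; the numbers are copied from the log, the formula attribution is an assumption:

    # Capacity arithmetic over the inventory dict logged by the scheduler report client.
    inventory = {
        "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        capacity = int((inv["total"] - inv["reserved"]) * inv["allocation_ratio"])
        print(f"{rc}: capacity {capacity}")
    # VCPU: capacity 192, MEMORY_MB: capacity 196078, DISK_GB: capacity 400

Against that capacity, the resource tracker's "Final resource view" line above reports only 2 vCPUs and 896 MB in use, which is why every periodic update logs "Inventory has not changed" rather than pushing a new inventory to placement.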
[ 972.700233] env[62096]: DEBUG nova.compute.claims [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 972.700414] env[62096]: DEBUG oslo_concurrency.lockutils [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.908737] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95319992-b766-4d26-8bb4-48db32e69137 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.916313] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbf913ae-6386-41d7-ba46-3b875a79b91c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.945819] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b7520b7-cf66-40b9-bbb7-7405ce354f6d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.953148] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15cb6078-6bd5-474e-a10b-5ed2348857ac {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.966057] env[62096]: DEBUG nova.compute.provider_tree [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 972.966574] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Expecting reply to msg 4dab9d6c6362483a8b38553c90fe5bd0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 972.973604] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4dab9d6c6362483a8b38553c90fe5bd0 [ 973.468817] env[62096]: DEBUG nova.scheduler.client.report [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 973.471133] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd 
tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Expecting reply to msg 35e47e0bc01645eda96808339a47bb45 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 973.483128] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 35e47e0bc01645eda96808339a47bb45 [ 973.974799] env[62096]: DEBUG oslo_concurrency.lockutils [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.630s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 973.975467] env[62096]: ERROR nova.compute.manager [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 85a05da5-6b4d-464e-8794-c1cdaa138525, please check neutron logs for more information. [ 973.975467] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] Traceback (most recent call last): [ 973.975467] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 973.975467] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] self.driver.spawn(context, instance, image_meta, [ 973.975467] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 973.975467] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 973.975467] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 973.975467] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] vm_ref = self.build_virtual_machine(instance, [ 973.975467] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 973.975467] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] vif_infos = vmwarevif.get_vif_info(self._session, [ 973.975467] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 973.976025] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] for vif in network_info: [ 973.976025] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 973.976025] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] return self._sync_wrapper(fn, *args, **kwargs) [ 973.976025] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 973.976025] env[62096]: ERROR nova.compute.manager [instance: 
e6ce8ba5-170a-4221-9e41-15580876f28f] self.wait() [ 973.976025] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 973.976025] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] self[:] = self._gt.wait() [ 973.976025] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 973.976025] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] return self._exit_event.wait() [ 973.976025] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 973.976025] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] current.throw(*self._exc) [ 973.976025] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 973.976025] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] result = function(*args, **kwargs) [ 973.976623] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 973.976623] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] return func(*args, **kwargs) [ 973.976623] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 973.976623] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] raise e [ 973.976623] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 973.976623] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] nwinfo = self.network_api.allocate_for_instance( [ 973.976623] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 973.976623] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] created_port_ids = self._update_ports_for_instance( [ 973.976623] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 973.976623] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] with excutils.save_and_reraise_exception(): [ 973.976623] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 973.976623] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] self.force_reraise() [ 973.976623] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 973.977207] env[62096]: ERROR nova.compute.manager 
[instance: e6ce8ba5-170a-4221-9e41-15580876f28f] raise self.value [ 973.977207] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 973.977207] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] updated_port = self._update_port( [ 973.977207] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 973.977207] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] _ensure_no_port_binding_failure(port) [ 973.977207] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 973.977207] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] raise exception.PortBindingFailed(port_id=port['id']) [ 973.977207] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] nova.exception.PortBindingFailed: Binding failed for port 85a05da5-6b4d-464e-8794-c1cdaa138525, please check neutron logs for more information. [ 973.977207] env[62096]: ERROR nova.compute.manager [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] [ 973.977207] env[62096]: DEBUG nova.compute.utils [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] Binding failed for port 85a05da5-6b4d-464e-8794-c1cdaa138525, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 973.979250] env[62096]: DEBUG nova.compute.manager [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] Build of instance e6ce8ba5-170a-4221-9e41-15580876f28f was re-scheduled: Binding failed for port 85a05da5-6b4d-464e-8794-c1cdaa138525, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 973.979659] env[62096]: DEBUG nova.compute.manager [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 973.979885] env[62096]: DEBUG oslo_concurrency.lockutils [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Acquiring lock "refresh_cache-e6ce8ba5-170a-4221-9e41-15580876f28f" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 973.980042] env[62096]: DEBUG oslo_concurrency.lockutils [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Acquired lock "refresh_cache-e6ce8ba5-170a-4221-9e41-15580876f28f" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 973.980205] env[62096]: DEBUG nova.network.neutron [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 973.980609] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Expecting reply to msg 29d1493f4e03431daa512975a4ed691e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 973.981762] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.772s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 973.983294] env[62096]: INFO nova.compute.claims [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 9ee34e47-6090-4617-a08c-0154874d4889] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 973.984812] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg c9fb96922af440fe8e660d483dc820b6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 973.999140] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 29d1493f4e03431daa512975a4ed691e [ 974.063016] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c9fb96922af440fe8e660d483dc820b6 [ 974.488356] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 2039e4c1486748daac0cdf55295e58f9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 974.496903] env[62096]: INFO oslo_messaging._drivers.amqpdriver 
[-] Received RPC response for msg 2039e4c1486748daac0cdf55295e58f9 [ 974.500161] env[62096]: DEBUG nova.network.neutron [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 974.578967] env[62096]: DEBUG nova.network.neutron [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 974.579544] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Expecting reply to msg 1c7aab97ae914427b70ebec207896842 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 974.587705] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1c7aab97ae914427b70ebec207896842 [ 975.057311] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7104907-b340-4883-a42d-fcb0d4efd645 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.065273] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5da81ed6-bb18-45e4-8bd7-926ec087c6ca {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.093519] env[62096]: DEBUG oslo_concurrency.lockutils [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Releasing lock "refresh_cache-e6ce8ba5-170a-4221-9e41-15580876f28f" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 975.093723] env[62096]: DEBUG nova.compute.manager [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 975.093901] env[62096]: DEBUG nova.compute.manager [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 975.094097] env[62096]: DEBUG nova.network.neutron [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 975.096239] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1abbf915-baf5-4ef3-b1b4-41024aea79ee {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.104016] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60f57013-4039-48bb-bd34-5633843b6f99 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.116703] env[62096]: DEBUG nova.compute.provider_tree [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 975.117297] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg f33c416b74d04f479a11bb4e2633e02d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 975.125138] env[62096]: DEBUG nova.network.neutron [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 975.125664] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Expecting reply to msg 1dbf131872ca4692904019f1ebb85e49 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 975.127116] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f33c416b74d04f479a11bb4e2633e02d [ 975.131561] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1dbf131872ca4692904019f1ebb85e49 [ 975.626259] env[62096]: DEBUG nova.scheduler.client.report [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 975.628632] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 1427ae356ded4c5d90f6c1357d9eeb57 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 975.631285] env[62096]: DEBUG nova.network.neutron [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.631285] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Expecting reply to msg 1a83f178f45f44cf936b487dd448931a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 975.639102] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1427ae356ded4c5d90f6c1357d9eeb57 [ 975.642117] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1a83f178f45f44cf936b487dd448931a [ 976.131312] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.149s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 976.131900] env[62096]: DEBUG nova.compute.manager [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 9ee34e47-6090-4617-a08c-0154874d4889] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 976.133571] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg d879184411194748823ea3c024de9427 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 976.134636] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.758s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 976.135994] env[62096]: INFO nova.compute.claims [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 976.137451] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 6176269a14584090bd89f8eb4dd7f1ba in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 976.138683] env[62096]: INFO nova.compute.manager [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] [instance: e6ce8ba5-170a-4221-9e41-15580876f28f] Took 1.04 seconds to deallocate network for instance. [ 976.140219] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Expecting reply to msg 29f2286696f3472b978b9d48c0e23d7a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 976.169101] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6176269a14584090bd89f8eb4dd7f1ba [ 976.180666] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d879184411194748823ea3c024de9427 [ 976.185427] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 29f2286696f3472b978b9d48c0e23d7a [ 976.640916] env[62096]: DEBUG nova.compute.utils [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 976.641541] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 42b9c716e20f4482815aa78968eb06bb in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 976.642501] env[62096]: DEBUG nova.compute.manager [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 9ee34e47-6090-4617-a08c-0154874d4889] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 976.642680] env[62096]: DEBUG nova.network.neutron [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 9ee34e47-6090-4617-a08c-0154874d4889] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 976.645609] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg b4a7611cff8e400a81194c2402514dc9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 976.648881] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Expecting reply to msg 898edcdcd62b4332a3136ead29ede4ad in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 976.653597] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4a7611cff8e400a81194c2402514dc9 [ 976.662788] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 42b9c716e20f4482815aa78968eb06bb [ 976.683628] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 898edcdcd62b4332a3136ead29ede4ad [ 976.685240] env[62096]: DEBUG nova.policy [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '09a4673f033c4d139efe4cd9ba4b7560', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd81a5a37184d4b29ad6df7e77dfd3ee4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 976.926263] env[62096]: DEBUG nova.network.neutron [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 9ee34e47-6090-4617-a08c-0154874d4889] Successfully created port: 68714d46-8dc6-4486-a90c-fa88ca23de64 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 977.152892] env[62096]: DEBUG nova.compute.manager [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 9ee34e47-6090-4617-a08c-0154874d4889] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 977.154555] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 8f5ef45c4dba49deac116008d341fbf4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 977.179059] env[62096]: INFO nova.scheduler.client.report [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Deleted allocations for instance e6ce8ba5-170a-4221-9e41-15580876f28f [ 977.185421] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Expecting reply to msg 3479d2f1150747ff8b9cf01799a7cd3a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 977.195913] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8f5ef45c4dba49deac116008d341fbf4 [ 977.203275] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3479d2f1150747ff8b9cf01799a7cd3a [ 977.219869] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ae1affc-e97c-4742-a8f1-c7cd45911843 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.227456] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ee62e6a-66db-489a-878c-813f56e3296e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.258212] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94bdb661-e654-4b0b-bd9a-df8513e7fc24 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.265600] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03d4432b-ab9e-41b4-96b9-920cdcf20e30 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.279503] env[62096]: DEBUG nova.compute.provider_tree [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 977.280023] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg a98c1347cf804fe3a9b64b6d337128ad in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 977.290542] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a98c1347cf804fe3a9b64b6d337128ad [ 978.302621] env[62096]: DEBUG nova.compute.manager [req-ddffb9dd-b2f1-4cda-8816-07f48e59170a req-6d2edf0b-6714-4937-ab6c-f777cc5fbdbe service nova] [instance: 9ee34e47-6090-4617-a08c-0154874d4889] Received event network-changed-68714d46-8dc6-4486-a90c-fa88ca23de64 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 978.302939] env[62096]: DEBUG 
nova.compute.manager [req-ddffb9dd-b2f1-4cda-8816-07f48e59170a req-6d2edf0b-6714-4937-ab6c-f777cc5fbdbe service nova] [instance: 9ee34e47-6090-4617-a08c-0154874d4889] Refreshing instance network info cache due to event network-changed-68714d46-8dc6-4486-a90c-fa88ca23de64. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 978.303028] env[62096]: DEBUG oslo_concurrency.lockutils [req-ddffb9dd-b2f1-4cda-8816-07f48e59170a req-6d2edf0b-6714-4937-ab6c-f777cc5fbdbe service nova] Acquiring lock "refresh_cache-9ee34e47-6090-4617-a08c-0154874d4889" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 978.303164] env[62096]: DEBUG oslo_concurrency.lockutils [req-ddffb9dd-b2f1-4cda-8816-07f48e59170a req-6d2edf0b-6714-4937-ab6c-f777cc5fbdbe service nova] Acquired lock "refresh_cache-9ee34e47-6090-4617-a08c-0154874d4889" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 978.303464] env[62096]: DEBUG nova.network.neutron [req-ddffb9dd-b2f1-4cda-8816-07f48e59170a req-6d2edf0b-6714-4937-ab6c-f777cc5fbdbe service nova] [instance: 9ee34e47-6090-4617-a08c-0154874d4889] Refreshing network info cache for port 68714d46-8dc6-4486-a90c-fa88ca23de64 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 978.303719] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-ddffb9dd-b2f1-4cda-8816-07f48e59170a req-6d2edf0b-6714-4937-ab6c-f777cc5fbdbe service nova] Expecting reply to msg 84a570c63ffa4c1780ad157e2c4a8d6a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 978.305323] env[62096]: ERROR nova.compute.manager [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 68714d46-8dc6-4486-a90c-fa88ca23de64, please check neutron logs for more information. 
[ 978.305323] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 978.305323] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 978.305323] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 978.305323] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 978.305323] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 978.305323] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 978.305323] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 978.305323] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 978.305323] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 978.305323] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 978.305323] env[62096]: ERROR nova.compute.manager raise self.value [ 978.305323] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 978.305323] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 978.305323] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 978.305323] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 978.305817] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 978.305817] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 978.305817] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 68714d46-8dc6-4486-a90c-fa88ca23de64, please check neutron logs for more information. 
[ 978.305817] env[62096]: ERROR nova.compute.manager [ 978.305817] env[62096]: Traceback (most recent call last): [ 978.305817] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 978.305817] env[62096]: listener.cb(fileno) [ 978.305817] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 978.305817] env[62096]: result = function(*args, **kwargs) [ 978.305817] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 978.305817] env[62096]: return func(*args, **kwargs) [ 978.305817] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 978.305817] env[62096]: raise e [ 978.305817] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 978.305817] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 978.305817] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 978.305817] env[62096]: created_port_ids = self._update_ports_for_instance( [ 978.305817] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 978.305817] env[62096]: with excutils.save_and_reraise_exception(): [ 978.305817] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 978.305817] env[62096]: self.force_reraise() [ 978.305817] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 978.305817] env[62096]: raise self.value [ 978.305817] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 978.305817] env[62096]: updated_port = self._update_port( [ 978.305817] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 978.305817] env[62096]: _ensure_no_port_binding_failure(port) [ 978.305817] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 978.305817] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 978.306651] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 68714d46-8dc6-4486-a90c-fa88ca23de64, please check neutron logs for more information. 
[ 978.306651] env[62096]: Removing descriptor: 14 [ 978.308149] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg af2b44822e104ae2b6086161c5b9ebe0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 978.309054] env[62096]: DEBUG oslo_concurrency.lockutils [None req-76cc4f6b-4345-45aa-869a-e02283bb07dd tempest-ServerActionsTestOtherA-356376316 tempest-ServerActionsTestOtherA-356376316-project-member] Lock "e6ce8ba5-170a-4221-9e41-15580876f28f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 22.891s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 978.309719] env[62096]: DEBUG nova.scheduler.client.report [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 978.311780] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg bfbb092cade94e1295528b6b532825e7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 978.324843] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 84a570c63ffa4c1780ad157e2c4a8d6a [ 978.327927] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bfbb092cade94e1295528b6b532825e7 [ 978.340372] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg af2b44822e104ae2b6086161c5b9ebe0 [ 978.817115] env[62096]: DEBUG nova.compute.manager [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 9ee34e47-6090-4617-a08c-0154874d4889] Start spawning the instance on the hypervisor. {{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 978.820720] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.686s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 978.822036] env[62096]: DEBUG nova.compute.manager [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 978.823015] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 9a59118d92f249888750aa91155e7424 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 978.824668] env[62096]: DEBUG oslo_concurrency.lockutils [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 6.124s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 978.826453] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg b82b50f41345494fa9d2a68341903494 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 978.845898] env[62096]: DEBUG nova.network.neutron [req-ddffb9dd-b2f1-4cda-8816-07f48e59170a req-6d2edf0b-6714-4937-ab6c-f777cc5fbdbe service nova] [instance: 9ee34e47-6090-4617-a08c-0154874d4889] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 978.849758] env[62096]: DEBUG nova.virt.hardware [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 978.849998] env[62096]: DEBUG nova.virt.hardware [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 978.850246] env[62096]: DEBUG nova.virt.hardware [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 978.850451] env[62096]: DEBUG nova.virt.hardware [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 978.850594] env[62096]: DEBUG nova.virt.hardware [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 
tempest-ServersTestJSON-796182065-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 978.850985] env[62096]: DEBUG nova.virt.hardware [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 978.850985] env[62096]: DEBUG nova.virt.hardware [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 978.851102] env[62096]: DEBUG nova.virt.hardware [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 978.851232] env[62096]: DEBUG nova.virt.hardware [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 978.851468] env[62096]: DEBUG nova.virt.hardware [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 978.851601] env[62096]: DEBUG nova.virt.hardware [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 978.853828] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b559072-4f4f-4483-8f1a-413fa4b0df4e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.862173] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9a59118d92f249888750aa91155e7424 [ 978.863590] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57d412ea-3ac8-4286-917d-7a1eeba4e115 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.868417] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b82b50f41345494fa9d2a68341903494 [ 978.880221] env[62096]: ERROR nova.compute.manager [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 9ee34e47-6090-4617-a08c-0154874d4889] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 68714d46-8dc6-4486-a90c-fa88ca23de64, please check neutron logs for more information. 
[ 978.880221] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] Traceback (most recent call last): [ 978.880221] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 978.880221] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] yield resources [ 978.880221] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 978.880221] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] self.driver.spawn(context, instance, image_meta, [ 978.880221] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 978.880221] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] self._vmops.spawn(context, instance, image_meta, injected_files, [ 978.880221] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 978.880221] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] vm_ref = self.build_virtual_machine(instance, [ 978.880221] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 978.880642] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] vif_infos = vmwarevif.get_vif_info(self._session, [ 978.880642] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 978.880642] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] for vif in network_info: [ 978.880642] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 978.880642] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] return self._sync_wrapper(fn, *args, **kwargs) [ 978.880642] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 978.880642] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] self.wait() [ 978.880642] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 978.880642] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] self[:] = self._gt.wait() [ 978.880642] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 978.880642] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] return self._exit_event.wait() [ 978.880642] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 978.880642] env[62096]: ERROR 
nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] current.throw(*self._exc) [ 978.881042] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 978.881042] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] result = function(*args, **kwargs) [ 978.881042] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 978.881042] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] return func(*args, **kwargs) [ 978.881042] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 978.881042] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] raise e [ 978.881042] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 978.881042] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] nwinfo = self.network_api.allocate_for_instance( [ 978.881042] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 978.881042] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] created_port_ids = self._update_ports_for_instance( [ 978.881042] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 978.881042] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] with excutils.save_and_reraise_exception(): [ 978.881042] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 978.881472] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] self.force_reraise() [ 978.881472] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 978.881472] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] raise self.value [ 978.881472] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 978.881472] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] updated_port = self._update_port( [ 978.881472] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 978.881472] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] _ensure_no_port_binding_failure(port) [ 978.881472] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
978.881472] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] raise exception.PortBindingFailed(port_id=port['id']) [ 978.881472] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] nova.exception.PortBindingFailed: Binding failed for port 68714d46-8dc6-4486-a90c-fa88ca23de64, please check neutron logs for more information. [ 978.881472] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] [ 978.881472] env[62096]: INFO nova.compute.manager [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 9ee34e47-6090-4617-a08c-0154874d4889] Terminating instance [ 978.882570] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Acquiring lock "refresh_cache-9ee34e47-6090-4617-a08c-0154874d4889" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 978.932419] env[62096]: DEBUG nova.network.neutron [req-ddffb9dd-b2f1-4cda-8816-07f48e59170a req-6d2edf0b-6714-4937-ab6c-f777cc5fbdbe service nova] [instance: 9ee34e47-6090-4617-a08c-0154874d4889] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 978.932928] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-ddffb9dd-b2f1-4cda-8816-07f48e59170a req-6d2edf0b-6714-4937-ab6c-f777cc5fbdbe service nova] Expecting reply to msg 53e9f4c5f3b3440fbbdd4949edecaa9b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 978.941424] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 53e9f4c5f3b3440fbbdd4949edecaa9b [ 979.329186] env[62096]: DEBUG nova.compute.utils [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 979.329811] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg d57060021b45417fac1fe3d6d88af75b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 979.333932] env[62096]: DEBUG nova.compute.manager [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 979.334146] env[62096]: DEBUG nova.network.neutron [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 979.340695] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d57060021b45417fac1fe3d6d88af75b [ 979.382929] env[62096]: DEBUG nova.policy [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6d00589b1aa24dd7beb7c3ac5cb2a8ca', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bffafc9dc83d477d823cd7364968f48a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 979.387579] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d965b92-f6cb-4d35-b5f8-386403739aa3 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.394945] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7c8bd80-1141-4a07-81ee-5b2f23329d19 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.429080] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7345f79c-5a45-4b31-bf3e-e3f41b0c3e91 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.435950] env[62096]: DEBUG oslo_concurrency.lockutils [req-ddffb9dd-b2f1-4cda-8816-07f48e59170a req-6d2edf0b-6714-4937-ab6c-f777cc5fbdbe service nova] Releasing lock "refresh_cache-9ee34e47-6090-4617-a08c-0154874d4889" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 979.436433] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Acquired lock "refresh_cache-9ee34e47-6090-4617-a08c-0154874d4889" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 979.436731] env[62096]: DEBUG nova.network.neutron [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 9ee34e47-6090-4617-a08c-0154874d4889] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 979.437164] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 7cca0a39fc2346eabd8b43aff297542c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 979.439563] env[62096]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44a37bb5-65c1-4f47-b273-04be868d8145 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.444567] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7cca0a39fc2346eabd8b43aff297542c [ 979.454878] env[62096]: DEBUG nova.compute.provider_tree [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 979.455366] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 024d44f3f16c4b76b1c262860488c575 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 979.462254] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 024d44f3f16c4b76b1c262860488c575 [ 979.610512] env[62096]: DEBUG nova.compute.manager [req-ee954757-305a-4ef3-a099-088678943f35 req-c078f7d8-e7b7-4ac2-839e-099afa6c974a service nova] [instance: 9ee34e47-6090-4617-a08c-0154874d4889] Received event network-vif-deleted-68714d46-8dc6-4486-a90c-fa88ca23de64 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 979.645562] env[62096]: DEBUG nova.network.neutron [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] Successfully created port: 9cbfab3e-cd6c-4e31-a190-fbef55824076 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 979.834396] env[62096]: DEBUG nova.compute.manager [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] Start building block device mappings for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 979.836402] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 3c81f68431e9428eb061221094f88ecd in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 979.875090] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c81f68431e9428eb061221094f88ecd [ 979.954538] env[62096]: DEBUG nova.network.neutron [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 9ee34e47-6090-4617-a08c-0154874d4889] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 979.958172] env[62096]: DEBUG nova.scheduler.client.report [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 979.960377] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 9201fa97c73241599a7037be1e62e140 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 979.971026] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9201fa97c73241599a7037be1e62e140 [ 980.036779] env[62096]: DEBUG nova.network.neutron [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 9ee34e47-6090-4617-a08c-0154874d4889] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 980.037374] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 94a0ca68cfe54a73ac2a544ec5152def in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 980.046322] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 94a0ca68cfe54a73ac2a544ec5152def [ 980.341041] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 121a6d75bba7473788321d03d5b6fa11 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 980.371520] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 121a6d75bba7473788321d03d5b6fa11 [ 980.462767] env[62096]: DEBUG oslo_concurrency.lockutils [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.638s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 980.463388] env[62096]: ERROR nova.compute.manager [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 533d8874-f924-49cc-ad4e-ba2d43bd335e, please check neutron logs for more information. 
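The inventory payload logged just above for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 encodes how much of each resource class placement can hand out: roughly (total - reserved) * allocation_ratio, with max_unit capping any single allocation. A minimal sketch reproducing that arithmetic from the logged dict follows; `effective_capacity` is an illustrative helper, not a Nova or placement API. The traceback for the port 533d8874 failure continues below.

```python
# Sketch only: recompute effective capacity from the inventory dict that
# nova.scheduler.client.report logged above. (total - reserved) * allocation_ratio
# mirrors how placement sizes a resource class; the helper name is illustrative.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'min_unit': 1, 'max_unit': 16,
                  'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530,
                  'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'min_unit': 1, 'max_unit': 127,
                  'step_size': 1, 'allocation_ratio': 1.0},
}

def effective_capacity(inv: dict) -> float:
    """Units placement can schedule for one resource class."""
    return (inv['total'] - inv['reserved']) * inv['allocation_ratio']

for rc, inv in inventory.items():
    # e.g. VCPU: (48 - 0) * 4.0 = 192 schedulable units, at most 16 per instance
    print(f"{rc}: {effective_capacity(inv):g} units, max {inv['max_unit']} per allocation")
```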
[ 980.463388] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] Traceback (most recent call last): [ 980.463388] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 980.463388] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] self.driver.spawn(context, instance, image_meta, [ 980.463388] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 980.463388] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] self._vmops.spawn(context, instance, image_meta, injected_files, [ 980.463388] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 980.463388] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] vm_ref = self.build_virtual_machine(instance, [ 980.463388] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 980.463388] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] vif_infos = vmwarevif.get_vif_info(self._session, [ 980.463388] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 980.463772] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] for vif in network_info: [ 980.463772] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 980.463772] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] return self._sync_wrapper(fn, *args, **kwargs) [ 980.463772] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 980.463772] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] self.wait() [ 980.463772] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 980.463772] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] self[:] = self._gt.wait() [ 980.463772] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 980.463772] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] return self._exit_event.wait() [ 980.463772] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 980.463772] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] current.throw(*self._exc) [ 980.463772] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
980.463772] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] result = function(*args, **kwargs) [ 980.464214] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 980.464214] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] return func(*args, **kwargs) [ 980.464214] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 980.464214] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] raise e [ 980.464214] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 980.464214] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] nwinfo = self.network_api.allocate_for_instance( [ 980.464214] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 980.464214] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] created_port_ids = self._update_ports_for_instance( [ 980.464214] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 980.464214] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] with excutils.save_and_reraise_exception(): [ 980.464214] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 980.464214] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] self.force_reraise() [ 980.464214] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 980.464624] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] raise self.value [ 980.464624] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 980.464624] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] updated_port = self._update_port( [ 980.464624] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 980.464624] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] _ensure_no_port_binding_failure(port) [ 980.464624] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 980.464624] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] raise exception.PortBindingFailed(port_id=port['id']) [ 980.464624] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] nova.exception.PortBindingFailed: Binding failed for 
port 533d8874-f924-49cc-ad4e-ba2d43bd335e, please check neutron logs for more information. [ 980.464624] env[62096]: ERROR nova.compute.manager [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] [ 980.464624] env[62096]: DEBUG nova.compute.utils [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] Binding failed for port 533d8874-f924-49cc-ad4e-ba2d43bd335e, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 980.465991] env[62096]: ERROR nova.compute.manager [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 9cbfab3e-cd6c-4e31-a190-fbef55824076, please check neutron logs for more information. [ 980.465991] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 980.465991] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 980.465991] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 980.465991] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 980.465991] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 980.465991] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 980.465991] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 980.465991] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 980.465991] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 980.465991] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 980.465991] env[62096]: ERROR nova.compute.manager raise self.value [ 980.465991] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 980.465991] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 980.465991] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 980.465991] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 980.466578] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 980.466578] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 980.466578] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 9cbfab3e-cd6c-4e31-a190-fbef55824076, please check neutron logs for more information. 
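The bottom frames of the traceback above raise PortBindingFailed out of _ensure_no_port_binding_failure once Neutron reports the updated port's binding as failed. Below is a minimal standalone sketch of that kind of check, assuming the port dict carries Neutron's binding:vif_type field; the exception class here is an illustrative stand-in, not nova.exception.

```python
# Standalone sketch of the check the traceback above ends in: after Neutron
# updates a port, a binding:vif_type of 'binding_failed' means the port could
# not be bound to the compute host, so the instance build must fail.
# PortBindingFailed below is a stand-in for nova.exception.PortBindingFailed.
class PortBindingFailed(Exception):
    def __init__(self, port_id: str):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")

VIF_TYPE_BINDING_FAILED = 'binding_failed'

def ensure_no_port_binding_failure(port: dict) -> None:
    """Raise if Neutron reported that binding this port to the host failed."""
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])

# A port in the state the log describes would look roughly like this:
port = {'id': '9cbfab3e-cd6c-4e31-a190-fbef55824076',
        'binding:vif_type': VIF_TYPE_BINDING_FAILED}
try:
    ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)  # same message format as the log records above
```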
[ 980.466578] env[62096]: ERROR nova.compute.manager [ 980.466578] env[62096]: Traceback (most recent call last): [ 980.466578] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 980.466578] env[62096]: listener.cb(fileno) [ 980.466578] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 980.466578] env[62096]: result = function(*args, **kwargs) [ 980.466578] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 980.466578] env[62096]: return func(*args, **kwargs) [ 980.466578] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 980.466578] env[62096]: raise e [ 980.466578] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 980.466578] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 980.466578] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 980.466578] env[62096]: created_port_ids = self._update_ports_for_instance( [ 980.466578] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 980.466578] env[62096]: with excutils.save_and_reraise_exception(): [ 980.466578] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 980.466578] env[62096]: self.force_reraise() [ 980.466578] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 980.466578] env[62096]: raise self.value [ 980.466578] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 980.466578] env[62096]: updated_port = self._update_port( [ 980.466578] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 980.466578] env[62096]: _ensure_no_port_binding_failure(port) [ 980.466578] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 980.466578] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 980.467494] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 9cbfab3e-cd6c-4e31-a190-fbef55824076, please check neutron logs for more information. [ 980.467494] env[62096]: Removing descriptor: 14 [ 980.467494] env[62096]: DEBUG nova.compute.manager [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] Build of instance 66af0854-df0c-47ae-9f97-c62c55bf6601 was re-scheduled: Binding failed for port 533d8874-f924-49cc-ad4e-ba2d43bd335e, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 980.467494] env[62096]: DEBUG nova.compute.manager [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 980.467494] env[62096]: DEBUG oslo_concurrency.lockutils [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Acquiring lock "refresh_cache-66af0854-df0c-47ae-9f97-c62c55bf6601" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 980.467494] env[62096]: DEBUG oslo_concurrency.lockutils [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Acquired lock "refresh_cache-66af0854-df0c-47ae-9f97-c62c55bf6601" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.467769] env[62096]: DEBUG nova.network.neutron [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 980.467925] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 37b5a8bf54ef4986926a81f680d52aa7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 980.475974] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 37b5a8bf54ef4986926a81f680d52aa7 [ 980.540016] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Releasing lock "refresh_cache-9ee34e47-6090-4617-a08c-0154874d4889" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 980.540490] env[62096]: DEBUG nova.compute.manager [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 9ee34e47-6090-4617-a08c-0154874d4889] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 980.540693] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 9ee34e47-6090-4617-a08c-0154874d4889] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 980.540977] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dad57f3f-939f-4165-add7-afff434863cc {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.550377] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b719e9fc-5b9a-4fe7-8d22-ac09ba25393e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.570982] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 9ee34e47-6090-4617-a08c-0154874d4889] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9ee34e47-6090-4617-a08c-0154874d4889 could not be found. [ 980.571185] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 9ee34e47-6090-4617-a08c-0154874d4889] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 980.571363] env[62096]: INFO nova.compute.manager [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 9ee34e47-6090-4617-a08c-0154874d4889] Took 0.03 seconds to destroy the instance on the hypervisor. [ 980.571595] env[62096]: DEBUG oslo.service.loopingcall [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 980.571796] env[62096]: DEBUG nova.compute.manager [-] [instance: 9ee34e47-6090-4617-a08c-0154874d4889] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 980.571883] env[62096]: DEBUG nova.network.neutron [-] [instance: 9ee34e47-6090-4617-a08c-0154874d4889] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 980.585658] env[62096]: DEBUG nova.network.neutron [-] [instance: 9ee34e47-6090-4617-a08c-0154874d4889] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 980.586124] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 750116d6966f47509eafca42e3ff47c0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 980.593542] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 750116d6966f47509eafca42e3ff47c0 [ 980.845578] env[62096]: DEBUG nova.compute.manager [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] Start spawning the instance on the hypervisor. {{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 980.870626] env[62096]: DEBUG nova.virt.hardware [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 980.870876] env[62096]: DEBUG nova.virt.hardware [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 980.871029] env[62096]: DEBUG nova.virt.hardware [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 980.871211] env[62096]: DEBUG nova.virt.hardware [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 980.871350] env[62096]: DEBUG nova.virt.hardware [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 980.871489] env[62096]: DEBUG nova.virt.hardware [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 980.871684] env[62096]: DEBUG nova.virt.hardware [None 
req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 980.871837] env[62096]: DEBUG nova.virt.hardware [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 980.871997] env[62096]: DEBUG nova.virt.hardware [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 980.872171] env[62096]: DEBUG nova.virt.hardware [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 980.872343] env[62096]: DEBUG nova.virt.hardware [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 980.873166] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf88d90c-3abe-4132-bd1a-611926652bab {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.880963] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b6a8332-aa8e-4bad-8088-ef8f354da83a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.894087] env[62096]: ERROR nova.compute.manager [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 9cbfab3e-cd6c-4e31-a190-fbef55824076, please check neutron logs for more information. 
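The nova.virt.hardware lines above walk through guest CPU topology selection for the 1-vCPU m1.nano flavor: with flavor and image limits all unset (0:0:0), every (sockets, cores, threads) factorization of vcpus=1 under the 65536 caps is enumerated, which leaves only 1:1:1. A rough sketch of that enumeration, assuming the simple "product of the three factors equals vcpus" rule implied by the log rather than Nova's full hardware.py logic; the spawn failure traceback for port 9cbfab3e follows.

```python
from itertools import product

def possible_topologies(vcpus: int, max_sockets: int, max_cores: int, max_threads: int):
    """Enumerate (sockets, cores, threads) whose product equals vcpus.

    Rough sketch of the enumeration the nova.virt.hardware DEBUG lines
    describe; the real code also weighs flavor/image preferences, which
    are all 0 (unset) in the log above.
    """
    for sockets, cores, threads in product(
            range(1, min(max_sockets, vcpus) + 1),
            range(1, min(max_cores, vcpus) + 1),
            range(1, min(max_threads, vcpus) + 1)):
        if sockets * cores * threads == vcpus:
            yield sockets, cores, threads

# m1.nano: 1 vCPU, limits effectively unbounded (65536) per the log
print(list(possible_topologies(1, 65536, 65536, 65536)))  # [(1, 1, 1)]
```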
[ 980.894087] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] Traceback (most recent call last): [ 980.894087] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 980.894087] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] yield resources [ 980.894087] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 980.894087] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] self.driver.spawn(context, instance, image_meta, [ 980.894087] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 980.894087] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 980.894087] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 980.894087] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] vm_ref = self.build_virtual_machine(instance, [ 980.894087] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 980.894503] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] vif_infos = vmwarevif.get_vif_info(self._session, [ 980.894503] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 980.894503] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] for vif in network_info: [ 980.894503] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 980.894503] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] return self._sync_wrapper(fn, *args, **kwargs) [ 980.894503] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 980.894503] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] self.wait() [ 980.894503] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 980.894503] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] self[:] = self._gt.wait() [ 980.894503] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 980.894503] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] return self._exit_event.wait() [ 980.894503] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 980.894503] env[62096]: ERROR 
nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] current.throw(*self._exc) [ 980.895358] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 980.895358] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] result = function(*args, **kwargs) [ 980.895358] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 980.895358] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] return func(*args, **kwargs) [ 980.895358] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 980.895358] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] raise e [ 980.895358] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 980.895358] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] nwinfo = self.network_api.allocate_for_instance( [ 980.895358] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 980.895358] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] created_port_ids = self._update_ports_for_instance( [ 980.895358] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 980.895358] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] with excutils.save_and_reraise_exception(): [ 980.895358] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 980.895788] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] self.force_reraise() [ 980.895788] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 980.895788] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] raise self.value [ 980.895788] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 980.895788] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] updated_port = self._update_port( [ 980.895788] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 980.895788] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] _ensure_no_port_binding_failure(port) [ 980.895788] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
980.895788] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] raise exception.PortBindingFailed(port_id=port['id']) [ 980.895788] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] nova.exception.PortBindingFailed: Binding failed for port 9cbfab3e-cd6c-4e31-a190-fbef55824076, please check neutron logs for more information. [ 980.895788] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] [ 980.895788] env[62096]: INFO nova.compute.manager [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] Terminating instance [ 980.896356] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Acquiring lock "refresh_cache-adea4ebf-6b22-4fd2-b601-bad448e4bbc5" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 980.896514] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Acquired lock "refresh_cache-adea4ebf-6b22-4fd2-b601-bad448e4bbc5" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.896676] env[62096]: DEBUG nova.network.neutron [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 980.897081] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 78ad4d4038f34070bd3b4439505d8925 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 980.903406] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 78ad4d4038f34070bd3b4439505d8925 [ 980.989013] env[62096]: DEBUG nova.network.neutron [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 981.087838] env[62096]: DEBUG nova.network.neutron [-] [instance: 9ee34e47-6090-4617-a08c-0154874d4889] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 981.088350] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e54dda8bd3bb405bb9aa58de771051d0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 981.098047] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e54dda8bd3bb405bb9aa58de771051d0 [ 981.106069] env[62096]: DEBUG nova.network.neutron [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 981.106688] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 2768ecd426184798941cf70b1f965dc4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 981.114926] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2768ecd426184798941cf70b1f965dc4 [ 981.414841] env[62096]: DEBUG nova.network.neutron [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 981.482510] env[62096]: DEBUG nova.network.neutron [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 981.483041] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 8da028bbf782459b8aebed393856d04c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 981.491578] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8da028bbf782459b8aebed393856d04c [ 981.591384] env[62096]: INFO nova.compute.manager [-] [instance: 9ee34e47-6090-4617-a08c-0154874d4889] Took 1.02 seconds to deallocate network for instance. 
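Both spawn tracebacks in this section fail at `for vif in network_info:` rather than inside the Neutron call itself: network allocation runs in a background greenthread, and the PortBindingFailed is only re-raised when the driver first iterates the lazily-resolved network_info (model.py __iter__ -> _sync_wrapper -> wait). Below is a rough analogue of that deferred-failure pattern using concurrent.futures instead of Nova's eventlet-based wrapper; the class and names are illustrative only.

```python
from concurrent.futures import ThreadPoolExecutor

class PortBindingFailed(Exception):
    """Illustrative stand-in for nova.exception.PortBindingFailed."""

class LazyNetworkInfo:
    """Rough analogue of the lazily-resolved network_info in the tracebacks:
    allocation runs in the background and any failure only surfaces when the
    result is first iterated (e.g. `for vif in network_info:`)."""

    def __init__(self, future):
        self._future = future

    def __iter__(self):
        # .result() re-raises whatever the background allocation raised, which
        # is why the tracebacks point at the iteration in the VMware driver
        # rather than at the Neutron call itself.
        return iter(self._future.result())

def allocate_for_instance():
    raise PortBindingFailed(
        "Binding failed for port 9cbfab3e-cd6c-4e31-a190-fbef55824076")

with ThreadPoolExecutor(max_workers=1) as pool:
    network_info = LazyNetworkInfo(pool.submit(allocate_for_instance))
    try:
        for vif in network_info:  # failure surfaces here, mid-spawn
            pass
    except PortBindingFailed as exc:
        print(f"spawn fails late: {exc}")
```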
[ 981.593741] env[62096]: DEBUG nova.compute.claims [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 9ee34e47-6090-4617-a08c-0154874d4889] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 981.593926] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 981.594317] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 981.596137] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 2390faf31cc1499abcb7c2faf706a15b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 981.608678] env[62096]: DEBUG oslo_concurrency.lockutils [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Releasing lock "refresh_cache-66af0854-df0c-47ae-9f97-c62c55bf6601" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 981.608884] env[62096]: DEBUG nova.compute.manager [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 981.609060] env[62096]: DEBUG nova.compute.manager [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 981.609219] env[62096]: DEBUG nova.network.neutron [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 981.627005] env[62096]: DEBUG nova.network.neutron [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 981.627538] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 379bc13189e745b6afa99b41aa902f34 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 981.633905] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 379bc13189e745b6afa99b41aa902f34 [ 981.641701] env[62096]: DEBUG nova.compute.manager [req-690e7efd-69a4-4f53-8625-f07e3a4dc91f req-53495ac3-a31e-402f-8dea-29ff233189b5 service nova] [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] Received event network-changed-9cbfab3e-cd6c-4e31-a190-fbef55824076 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 981.641888] env[62096]: DEBUG nova.compute.manager [req-690e7efd-69a4-4f53-8625-f07e3a4dc91f req-53495ac3-a31e-402f-8dea-29ff233189b5 service nova] [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] Refreshing instance network info cache due to event network-changed-9cbfab3e-cd6c-4e31-a190-fbef55824076. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 981.642189] env[62096]: DEBUG oslo_concurrency.lockutils [req-690e7efd-69a4-4f53-8625-f07e3a4dc91f req-53495ac3-a31e-402f-8dea-29ff233189b5 service nova] Acquiring lock "refresh_cache-adea4ebf-6b22-4fd2-b601-bad448e4bbc5" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 981.642547] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2390faf31cc1499abcb7c2faf706a15b [ 981.985812] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Releasing lock "refresh_cache-adea4ebf-6b22-4fd2-b601-bad448e4bbc5" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 981.986221] env[62096]: DEBUG nova.compute.manager [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 981.986433] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 981.986740] env[62096]: DEBUG oslo_concurrency.lockutils [req-690e7efd-69a4-4f53-8625-f07e3a4dc91f req-53495ac3-a31e-402f-8dea-29ff233189b5 service nova] Acquired lock "refresh_cache-adea4ebf-6b22-4fd2-b601-bad448e4bbc5" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.986904] env[62096]: DEBUG nova.network.neutron [req-690e7efd-69a4-4f53-8625-f07e3a4dc91f req-53495ac3-a31e-402f-8dea-29ff233189b5 service nova] [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] Refreshing network info cache for port 9cbfab3e-cd6c-4e31-a190-fbef55824076 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 981.987337] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-690e7efd-69a4-4f53-8625-f07e3a4dc91f req-53495ac3-a31e-402f-8dea-29ff233189b5 service nova] Expecting reply to msg 31cf4f551bf54d43ad8189b0f8b0fd07 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 981.988458] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-32f89ad7-52b4-43a8-aa04-6a5f7513b406 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.995059] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 31cf4f551bf54d43ad8189b0f8b0fd07 [ 981.997936] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1df1a5c-40d3-41c6-a263-e48afb28bf5f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.018529] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance adea4ebf-6b22-4fd2-b601-bad448e4bbc5 could not be found. [ 982.018731] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 982.018905] env[62096]: INFO nova.compute.manager [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] Took 0.03 seconds to destroy the instance on the hypervisor. [ 982.019138] env[62096]: DEBUG oslo.service.loopingcall [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 982.019360] env[62096]: DEBUG nova.compute.manager [-] [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 982.019455] env[62096]: DEBUG nova.network.neutron [-] [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 982.033277] env[62096]: DEBUG nova.network.neutron [-] [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 982.033700] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 42da60f894a74eeaa40e5713479be130 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 982.039960] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 42da60f894a74eeaa40e5713479be130 [ 982.071633] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 982.071823] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 982.072464] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 12249c864b944005bcdd53b0582dcd9b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 982.080531] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 12249c864b944005bcdd53b0582dcd9b [ 982.129453] env[62096]: DEBUG nova.network.neutron [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 982.129951] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 5f20540912b44fb18f24f7e61169461b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 982.138459] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5f20540912b44fb18f24f7e61169461b [ 982.152445] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-865c00aa-8daa-42b6-9a69-dee88c77cf43 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.159959] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6c9be2e-d903-41ca-8dbb-e159fc899a2c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.190937] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-68a4f301-68b5-4989-a175-89a606a3c687 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.199586] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e929aa3a-18c8-4a33-944e-6c6c3c5d6760 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.214308] env[62096]: DEBUG nova.compute.provider_tree [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 982.215033] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 0e970f3f5e5d4194869b5f8b79f336f0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 982.224618] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0e970f3f5e5d4194869b5f8b79f336f0 [ 982.510878] env[62096]: DEBUG nova.network.neutron [req-690e7efd-69a4-4f53-8625-f07e3a4dc91f req-53495ac3-a31e-402f-8dea-29ff233189b5 service nova] [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 982.535263] env[62096]: DEBUG nova.network.neutron [-] [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 982.535742] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 477799eb307a4bcc8eed81cf26cf3d3e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 982.544910] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 477799eb307a4bcc8eed81cf26cf3d3e [ 982.578122] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 982.578253] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 982.578377] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62096) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10545}} [ 982.591929] env[62096]: DEBUG nova.network.neutron [req-690e7efd-69a4-4f53-8625-f07e3a4dc91f req-53495ac3-a31e-402f-8dea-29ff233189b5 service nova] [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 982.592469] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-690e7efd-69a4-4f53-8625-f07e3a4dc91f req-53495ac3-a31e-402f-8dea-29ff233189b5 service nova] Expecting reply to msg d4185a7a64fa4448884dda1d7c1ca102 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 982.600475] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d4185a7a64fa4448884dda1d7c1ca102 [ 982.631898] env[62096]: INFO nova.compute.manager [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 66af0854-df0c-47ae-9f97-c62c55bf6601] Took 1.02 seconds to deallocate network for instance. [ 982.634129] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg daea94aacf044c79bec65e9a389377ed in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 982.668447] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg daea94aacf044c79bec65e9a389377ed [ 982.718475] env[62096]: DEBUG nova.scheduler.client.report [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 982.720748] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 0631ee455b944165a32632b9c00830dc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 982.731399] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0631ee455b944165a32632b9c00830dc [ 983.037783] env[62096]: INFO nova.compute.manager [-] [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] Took 1.02 seconds to deallocate network for instance. 
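The lock traffic in the entries above comes from oslo.concurrency: the plain 'Acquiring lock "refresh_cache-<uuid>"' / 'Acquired lock' / 'Releasing lock' lines are attributed to lockutils.py:310/313/331 (the lock() context manager), while the 'Lock "compute_resources" acquired by ... :: waited/held' lines are attributed to the synchronized wrapper at lockutils.py:402/407/421. The sketch below is a minimal illustration of those two usage patterns, not Nova's own code; the function names refresh_cache, rebuild_network_cache, and claim_resources are hypothetical stand-ins.

```python
# Minimal sketch of the two oslo.concurrency locking patterns referenced by the
# log above. Names other than the lockutils API itself are illustrative only.
from oslo_concurrency import lockutils


def refresh_cache(instance_uuid, rebuild_network_cache):
    # lockutils.lock() is a context manager; entering and leaving it produces
    # the bare "Acquiring lock"/"Acquired lock"/"Releasing lock" DEBUG lines
    # that the log attributes to lockutils.py:310/313/331.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        return rebuild_network_cache(instance_uuid)


@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    # The decorator form wraps the call in the same lock; its wrapper ("inner",
    # lockutils.py:402/407/421) additionally logs which callable took the lock
    # and the waited/held durations, as seen for "compute_resources" above.
    return {'instance': instance_uuid}
```

The context-manager form logs only the lock name, while the decorator form also records the owning callable and timings, which is why only the "compute_resources" entries carry waited/held figures.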
[ 983.041309] env[62096]: DEBUG nova.compute.claims [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 983.041496] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.094818] env[62096]: DEBUG oslo_concurrency.lockutils [req-690e7efd-69a4-4f53-8625-f07e3a4dc91f req-53495ac3-a31e-402f-8dea-29ff233189b5 service nova] Releasing lock "refresh_cache-adea4ebf-6b22-4fd2-b601-bad448e4bbc5" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 983.095082] env[62096]: DEBUG nova.compute.manager [req-690e7efd-69a4-4f53-8625-f07e3a4dc91f req-53495ac3-a31e-402f-8dea-29ff233189b5 service nova] [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] Received event network-vif-deleted-9cbfab3e-cd6c-4e31-a190-fbef55824076 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 983.138938] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 629405b203ea45b1a4b4c1f31431b4be in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 983.176405] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 629405b203ea45b1a4b4c1f31431b4be [ 983.223670] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.629s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 983.224350] env[62096]: ERROR nova.compute.manager [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 9ee34e47-6090-4617-a08c-0154874d4889] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 68714d46-8dc6-4486-a90c-fa88ca23de64, please check neutron logs for more information. 
[ 983.224350] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] Traceback (most recent call last): [ 983.224350] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 983.224350] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] self.driver.spawn(context, instance, image_meta, [ 983.224350] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 983.224350] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] self._vmops.spawn(context, instance, image_meta, injected_files, [ 983.224350] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 983.224350] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] vm_ref = self.build_virtual_machine(instance, [ 983.224350] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 983.224350] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] vif_infos = vmwarevif.get_vif_info(self._session, [ 983.224350] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 983.224807] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] for vif in network_info: [ 983.224807] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 983.224807] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] return self._sync_wrapper(fn, *args, **kwargs) [ 983.224807] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 983.224807] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] self.wait() [ 983.224807] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 983.224807] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] self[:] = self._gt.wait() [ 983.224807] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 983.224807] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] return self._exit_event.wait() [ 983.224807] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 983.224807] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] current.throw(*self._exc) [ 983.224807] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
983.224807] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] result = function(*args, **kwargs) [ 983.225290] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 983.225290] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] return func(*args, **kwargs) [ 983.225290] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 983.225290] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] raise e [ 983.225290] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 983.225290] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] nwinfo = self.network_api.allocate_for_instance( [ 983.225290] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 983.225290] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] created_port_ids = self._update_ports_for_instance( [ 983.225290] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 983.225290] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] with excutils.save_and_reraise_exception(): [ 983.225290] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 983.225290] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] self.force_reraise() [ 983.225290] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 983.225720] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] raise self.value [ 983.225720] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 983.225720] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] updated_port = self._update_port( [ 983.225720] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 983.225720] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] _ensure_no_port_binding_failure(port) [ 983.225720] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 983.225720] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] raise exception.PortBindingFailed(port_id=port['id']) [ 983.225720] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] nova.exception.PortBindingFailed: Binding failed for 
port 68714d46-8dc6-4486-a90c-fa88ca23de64, please check neutron logs for more information. [ 983.225720] env[62096]: ERROR nova.compute.manager [instance: 9ee34e47-6090-4617-a08c-0154874d4889] [ 983.225720] env[62096]: DEBUG nova.compute.utils [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 9ee34e47-6090-4617-a08c-0154874d4889] Binding failed for port 68714d46-8dc6-4486-a90c-fa88ca23de64, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 983.226263] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.185s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.227970] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 088504c796254194a874f987bcbbb497 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 983.229041] env[62096]: DEBUG nova.compute.manager [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 9ee34e47-6090-4617-a08c-0154874d4889] Build of instance 9ee34e47-6090-4617-a08c-0154874d4889 was re-scheduled: Binding failed for port 68714d46-8dc6-4486-a90c-fa88ca23de64, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 983.229449] env[62096]: DEBUG nova.compute.manager [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 9ee34e47-6090-4617-a08c-0154874d4889] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 983.229667] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Acquiring lock "refresh_cache-9ee34e47-6090-4617-a08c-0154874d4889" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 983.229810] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Acquired lock "refresh_cache-9ee34e47-6090-4617-a08c-0154874d4889" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.229966] env[62096]: DEBUG nova.network.neutron [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 9ee34e47-6090-4617-a08c-0154874d4889] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 983.230325] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 4ac175bb2627451385b042d453759afa in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 983.236611] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4ac175bb2627451385b042d453759afa [ 983.258928] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 088504c796254194a874f987bcbbb497 [ 983.663856] env[62096]: INFO nova.scheduler.client.report [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Deleted allocations for instance 66af0854-df0c-47ae-9f97-c62c55bf6601 [ 983.669704] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 254f6334165a4c16b591c3b1fa766183 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 983.682402] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 254f6334165a4c16b591c3b1fa766183 [ 983.750316] env[62096]: DEBUG nova.network.neutron [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 9ee34e47-6090-4617-a08c-0154874d4889] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 983.772669] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2395aa47-4411-425a-b957-8ff2c4322f3e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.780517] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ef98378-ff4b-49be-a29a-9b3c6d7276f9 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.810588] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc7f9efe-9f0a-435c-ab82-70fa5a7d157a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.817940] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b1a079e-168c-4ec4-9ae2-43c13968bfd8 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.830310] env[62096]: DEBUG nova.compute.provider_tree [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 983.830854] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg acf75c3b53fc4053988d5181094faf28 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 983.839358] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg acf75c3b53fc4053988d5181094faf28 [ 983.845548] env[62096]: DEBUG nova.network.neutron [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 9ee34e47-6090-4617-a08c-0154874d4889] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.846069] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 8321aabe42d244c19050d3c3459cd946 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 983.853993] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8321aabe42d244c19050d3c3459cd946 [ 984.171481] env[62096]: DEBUG oslo_concurrency.lockutils [None req-031e832e-abd3-4e5b-8f81-81280bf69042 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Lock "66af0854-df0c-47ae-9f97-c62c55bf6601" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.476s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 984.333768] env[62096]: DEBUG nova.scheduler.client.report [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Inventory has not changed for provider 
6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 984.336300] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 7af2122e60a44d10875bf6591703fb16 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 984.348059] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Releasing lock "refresh_cache-9ee34e47-6090-4617-a08c-0154874d4889" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 984.348317] env[62096]: DEBUG nova.compute.manager [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 984.348502] env[62096]: DEBUG nova.compute.manager [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 9ee34e47-6090-4617-a08c-0154874d4889] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 984.348664] env[62096]: DEBUG nova.network.neutron [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 9ee34e47-6090-4617-a08c-0154874d4889] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 984.350656] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7af2122e60a44d10875bf6591703fb16 [ 984.363213] env[62096]: DEBUG nova.network.neutron [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 9ee34e47-6090-4617-a08c-0154874d4889] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 984.363731] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg f507fa67af174568a6c0e799815770aa in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 984.369950] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f507fa67af174568a6c0e799815770aa [ 984.838850] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.612s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 984.839456] env[62096]: ERROR nova.compute.manager [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 9cbfab3e-cd6c-4e31-a190-fbef55824076, please check neutron logs for more information. [ 984.839456] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] Traceback (most recent call last): [ 984.839456] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 984.839456] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] self.driver.spawn(context, instance, image_meta, [ 984.839456] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 984.839456] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 984.839456] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 984.839456] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] vm_ref = self.build_virtual_machine(instance, [ 984.839456] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 984.839456] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] vif_infos = vmwarevif.get_vif_info(self._session, [ 984.839456] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 984.839792] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] for vif in network_info: [ 984.839792] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 984.839792] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] return self._sync_wrapper(fn, *args, **kwargs) [ 984.839792] env[62096]: ERROR nova.compute.manager 
[instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 984.839792] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] self.wait() [ 984.839792] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 984.839792] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] self[:] = self._gt.wait() [ 984.839792] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 984.839792] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] return self._exit_event.wait() [ 984.839792] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 984.839792] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] current.throw(*self._exc) [ 984.839792] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 984.839792] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] result = function(*args, **kwargs) [ 984.840172] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 984.840172] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] return func(*args, **kwargs) [ 984.840172] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 984.840172] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] raise e [ 984.840172] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 984.840172] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] nwinfo = self.network_api.allocate_for_instance( [ 984.840172] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 984.840172] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] created_port_ids = self._update_ports_for_instance( [ 984.840172] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 984.840172] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] with excutils.save_and_reraise_exception(): [ 984.840172] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 984.840172] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] self.force_reraise() [ 984.840172] env[62096]: ERROR nova.compute.manager [instance: 
adea4ebf-6b22-4fd2-b601-bad448e4bbc5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 984.840519] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] raise self.value [ 984.840519] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 984.840519] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] updated_port = self._update_port( [ 984.840519] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 984.840519] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] _ensure_no_port_binding_failure(port) [ 984.840519] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 984.840519] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] raise exception.PortBindingFailed(port_id=port['id']) [ 984.840519] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] nova.exception.PortBindingFailed: Binding failed for port 9cbfab3e-cd6c-4e31-a190-fbef55824076, please check neutron logs for more information. [ 984.840519] env[62096]: ERROR nova.compute.manager [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] [ 984.840519] env[62096]: DEBUG nova.compute.utils [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] Binding failed for port 9cbfab3e-cd6c-4e31-a190-fbef55824076, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 984.841901] env[62096]: DEBUG nova.compute.manager [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] Build of instance adea4ebf-6b22-4fd2-b601-bad448e4bbc5 was re-scheduled: Binding failed for port 9cbfab3e-cd6c-4e31-a190-fbef55824076, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 984.842311] env[62096]: DEBUG nova.compute.manager [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 984.842545] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Acquiring lock "refresh_cache-adea4ebf-6b22-4fd2-b601-bad448e4bbc5" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 984.842689] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Acquired lock "refresh_cache-adea4ebf-6b22-4fd2-b601-bad448e4bbc5" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 984.842843] env[62096]: DEBUG nova.network.neutron [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 984.843245] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 262f1d7ae65b4a4d927a3c6e18707859 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 984.849490] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 262f1d7ae65b4a4d927a3c6e18707859 [ 984.866575] env[62096]: DEBUG nova.network.neutron [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 9ee34e47-6090-4617-a08c-0154874d4889] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 984.867158] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg b8f5c8cdc85d43ab802921a45c1ed20c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 984.875381] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b8f5c8cdc85d43ab802921a45c1ed20c [ 985.359845] env[62096]: DEBUG nova.network.neutron [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 985.370191] env[62096]: INFO nova.compute.manager [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 9ee34e47-6090-4617-a08c-0154874d4889] Took 1.02 seconds to deallocate network for instance. 
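Both build failures in this section terminate at nova/network/neutron.py:294, where _ensure_no_port_binding_failure(port) raises PortBindingFailed(port_id=port['id']). The log shows the call site and the resulting message but not the body of the check; the sketch below is a simplified approximation that assumes a failed Neutron binding is signalled by the port's 'binding:vif_type' attribute being 'binding_failed', and it uses a stand-in exception class rather than nova.exception.

```python
# Simplified approximation (not Nova's verbatim code) of the check that raised
# PortBindingFailed in the tracebacks above. The 'binding:vif_type' == 'binding_failed'
# condition is an assumption based on Neutron's port-binding convention.
class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            'Binding failed for port %s, please check neutron logs for more '
            'information.' % port_id)


def ensure_no_port_binding_failure(port):
    """Raise if Neutron reports that it could not bind the port."""
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


# Example: a port dict shaped like a Neutron API response after a failed binding,
# using the port id from the adea4ebf-6b22-4fd2-b601-bad448e4bbc5 traceback.
port = {'id': '9cbfab3e-cd6c-4e31-a190-fbef55824076',
        'binding:vif_type': 'binding_failed'}
try:
    ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)  # same message format as the ERROR entries above
```

Once the exception reaches _do_build_and_run_instance, the claim is aborted and the build is re-scheduled, which matches the "Aborting claim" and "was re-scheduled" entries recorded here for both 9ee34e47 and adea4ebf.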
[ 985.371925] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 8ad2c796f275476abfc62d697e4f7704 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 985.406430] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8ad2c796f275476abfc62d697e4f7704 [ 985.443974] env[62096]: DEBUG nova.network.neutron [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 985.444655] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg ca20f6c2f17d469ea60a70e3c19117e2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 985.454433] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ca20f6c2f17d469ea60a70e3c19117e2 [ 985.515789] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Acquiring lock "3266e07d-a297-49e4-945c-a4da92b81d14" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.516077] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Lock "3266e07d-a297-49e4-945c-a4da92b81d14" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 985.516576] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg f56b4f2bda7242e5baea1f9ea36a8422 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 985.526456] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f56b4f2bda7242e5baea1f9ea36a8422 [ 985.876593] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 9abfdcaa676a471a8fc4f964d26b937f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 985.906694] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9abfdcaa676a471a8fc4f964d26b937f [ 985.948492] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Releasing lock "refresh_cache-adea4ebf-6b22-4fd2-b601-bad448e4bbc5" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 985.948611] env[62096]: DEBUG nova.compute.manager [None 
req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 985.948857] env[62096]: DEBUG nova.compute.manager [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 985.949028] env[62096]: DEBUG nova.network.neutron [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 985.968101] env[62096]: DEBUG nova.network.neutron [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 985.968837] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 4c344c059be144c382de368ca21154bc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 985.975972] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4c344c059be144c382de368ca21154bc [ 986.018455] env[62096]: DEBUG nova.compute.manager [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] Starting instance... 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 986.020220] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg da19aa05e8f3482ba10eb78f8cfe092a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 986.059153] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg da19aa05e8f3482ba10eb78f8cfe092a [ 986.400867] env[62096]: INFO nova.scheduler.client.report [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Deleted allocations for instance 9ee34e47-6090-4617-a08c-0154874d4889 [ 986.407029] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg f7175fe1b3c848489de084c28f73e65c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 986.417048] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f7175fe1b3c848489de084c28f73e65c [ 986.471635] env[62096]: DEBUG nova.network.neutron [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 986.472255] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg c631a59ec17442deab4d8eb7793faebc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 986.478506] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c631a59ec17442deab4d8eb7793faebc [ 986.537689] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 986.537959] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 986.539374] env[62096]: INFO nova.compute.claims [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 986.541186] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg a685bd601c59446d8c795569fe479b08 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 986.573085] 
env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a685bd601c59446d8c795569fe479b08 [ 986.909706] env[62096]: DEBUG oslo_concurrency.lockutils [None req-c3832ec5-7c3d-4059-9e29-89d5368c4b4f tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Lock "9ee34e47-6090-4617-a08c-0154874d4889" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.719s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 986.974373] env[62096]: INFO nova.compute.manager [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] [instance: adea4ebf-6b22-4fd2-b601-bad448e4bbc5] Took 1.03 seconds to deallocate network for instance. [ 986.976134] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 8e96bd832f1c4cf7b744f4ea5c7f47ac in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 987.012572] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8e96bd832f1c4cf7b744f4ea5c7f47ac [ 987.044275] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg eb809fa5fd9a49718eff027bbc6bdb28 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 987.051943] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eb809fa5fd9a49718eff027bbc6bdb28 [ 987.480826] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg 461cc7dbdb374bb3ac744df4f4b1e6b9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 987.509521] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 461cc7dbdb374bb3ac744df4f4b1e6b9 [ 987.592398] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-733ad4d3-4579-4cc6-abb6-98e32ca8472e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.601104] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32e3d4b0-f533-4cae-9fc6-22b2f6c093f0 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.630299] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33cf36a7-03f4-4ff7-8a74-f9c62acf1a19 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.636892] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c62ec54-551e-46db-b76b-bf9a5ed8ab2f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.649446] env[62096]: DEBUG nova.compute.provider_tree [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Inventory has not changed 
in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 987.649925] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 8b310901880a4bf6853aaae0ed661537 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 987.656839] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8b310901880a4bf6853aaae0ed661537 [ 987.928650] env[62096]: DEBUG oslo_concurrency.lockutils [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Acquiring lock "82731039-b916-47bb-b00f-b7f7785d9fc3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 987.928967] env[62096]: DEBUG oslo_concurrency.lockutils [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Lock "82731039-b916-47bb-b00f-b7f7785d9fc3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 987.929370] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 3abd442da2674549bce21553b5f0120b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 987.938605] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3abd442da2674549bce21553b5f0120b [ 988.000064] env[62096]: INFO nova.scheduler.client.report [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Deleted allocations for instance adea4ebf-6b22-4fd2-b601-bad448e4bbc5 [ 988.006398] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Expecting reply to msg dd9339eba8ba4a22af1e83be31fb27d3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 988.023170] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dd9339eba8ba4a22af1e83be31fb27d3 [ 988.153105] env[62096]: DEBUG nova.scheduler.client.report [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 988.155548] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b 
tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg ed8a77197a064a73bfc6b2912b4fb996 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 988.165714] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ed8a77197a064a73bfc6b2912b4fb996 [ 988.431444] env[62096]: DEBUG nova.compute.manager [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 988.434008] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 4cf208c5830643d680c1e810c9d1a8cb in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 988.464747] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4cf208c5830643d680c1e810c9d1a8cb [ 988.508722] env[62096]: DEBUG oslo_concurrency.lockutils [None req-1f6cc430-d24d-425c-86fd-fb94cba13cee tempest-ServerDiskConfigTestJSON-884909396 tempest-ServerDiskConfigTestJSON-884909396-project-member] Lock "adea4ebf-6b22-4fd2-b601-bad448e4bbc5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.152s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 988.658602] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.120s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 988.659236] env[62096]: DEBUG nova.compute.manager [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 988.661011] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 7bfeb1f9e1db4a7a908e60ff21211c3e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 988.690694] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7bfeb1f9e1db4a7a908e60ff21211c3e [ 988.951106] env[62096]: DEBUG oslo_concurrency.lockutils [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 988.951373] env[62096]: DEBUG oslo_concurrency.lockutils [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 988.952848] env[62096]: INFO nova.compute.claims [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 988.954383] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg acace565b5aa435ea3688af086254a95 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 988.985381] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg acace565b5aa435ea3688af086254a95 [ 989.164341] env[62096]: DEBUG nova.compute.utils [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 989.164955] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 924a440f29ce4a6cab0c5f1152a7be3f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 989.165916] env[62096]: DEBUG nova.compute.manager [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 989.166085] env[62096]: DEBUG nova.network.neutron [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 989.175409] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 924a440f29ce4a6cab0c5f1152a7be3f [ 989.318668] env[62096]: DEBUG nova.policy [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '491a2069427f43e79347a70e475e4dd2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e45573130e8e4ce4979b37e1b4c5af9f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 989.458696] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 35ff40048be64dda85c994251b57bdbe in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 989.467094] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 35ff40048be64dda85c994251b57bdbe [ 989.668971] env[62096]: DEBUG nova.compute.manager [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 989.670628] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 0a59ac4c208147d6b77422e163a3d154 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 989.702206] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0a59ac4c208147d6b77422e163a3d154 [ 989.763563] env[62096]: DEBUG nova.network.neutron [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] Successfully created port: c9a90464-f5be-447e-84d8-be11d5cc35ae {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 990.000407] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47ebf1a9-1d18-4e56-a7ca-163366f3e464 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.008162] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12bd1bcb-37e9-4cf2-bb09-d741bf89556c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.040621] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a26c0074-d313-4929-82d5-c3ecd4932ec4 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.047685] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02f0bc37-baca-4948-a8e5-281faf5bc241 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.063615] env[62096]: DEBUG nova.compute.provider_tree [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 990.064108] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg a0003f9f21294c71aa5cda12c4796e85 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 990.072531] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a0003f9f21294c71aa5cda12c4796e85 [ 990.176118] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 104e7d4b711c48268f57d9696d2e18f0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 990.214297] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 104e7d4b711c48268f57d9696d2e18f0 [ 990.567114] env[62096]: DEBUG nova.scheduler.client.report [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': 
{'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 990.569392] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg b37faa1e14fe46c0aff391b0c6ff196c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 990.583917] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b37faa1e14fe46c0aff391b0c6ff196c [ 990.678806] env[62096]: DEBUG nova.compute.manager [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] Start spawning the instance on the hypervisor. {{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 990.703556] env[62096]: DEBUG nova.virt.hardware [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 990.703802] env[62096]: DEBUG nova.virt.hardware [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 990.703959] env[62096]: DEBUG nova.virt.hardware [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 990.704184] env[62096]: DEBUG nova.virt.hardware [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 990.704339] env[62096]: DEBUG nova.virt.hardware [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 990.704485] env[62096]: 
DEBUG nova.virt.hardware [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 990.704689] env[62096]: DEBUG nova.virt.hardware [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 990.704844] env[62096]: DEBUG nova.virt.hardware [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 990.705030] env[62096]: DEBUG nova.virt.hardware [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 990.705198] env[62096]: DEBUG nova.virt.hardware [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 990.705364] env[62096]: DEBUG nova.virt.hardware [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 990.706226] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c880e253-4760-484b-9e0d-7301c6e438e9 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.715594] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-326c207a-6773-4943-ad31-cd644f4c8be0 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.797902] env[62096]: DEBUG nova.compute.manager [req-f6027ade-7d7d-4a24-a33a-d202c8a8d82c req-926e8e21-2641-4a56-b9c8-d9a35df96de1 service nova] [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] Received event network-changed-c9a90464-f5be-447e-84d8-be11d5cc35ae {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 990.797902] env[62096]: DEBUG nova.compute.manager [req-f6027ade-7d7d-4a24-a33a-d202c8a8d82c req-926e8e21-2641-4a56-b9c8-d9a35df96de1 service nova] [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] Refreshing instance network info cache due to event network-changed-c9a90464-f5be-447e-84d8-be11d5cc35ae. 
{{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 990.798106] env[62096]: DEBUG oslo_concurrency.lockutils [req-f6027ade-7d7d-4a24-a33a-d202c8a8d82c req-926e8e21-2641-4a56-b9c8-d9a35df96de1 service nova] Acquiring lock "refresh_cache-3266e07d-a297-49e4-945c-a4da92b81d14" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 990.798226] env[62096]: DEBUG oslo_concurrency.lockutils [req-f6027ade-7d7d-4a24-a33a-d202c8a8d82c req-926e8e21-2641-4a56-b9c8-d9a35df96de1 service nova] Acquired lock "refresh_cache-3266e07d-a297-49e4-945c-a4da92b81d14" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.798386] env[62096]: DEBUG nova.network.neutron [req-f6027ade-7d7d-4a24-a33a-d202c8a8d82c req-926e8e21-2641-4a56-b9c8-d9a35df96de1 service nova] [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] Refreshing network info cache for port c9a90464-f5be-447e-84d8-be11d5cc35ae {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 990.798801] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-f6027ade-7d7d-4a24-a33a-d202c8a8d82c req-926e8e21-2641-4a56-b9c8-d9a35df96de1 service nova] Expecting reply to msg 841135c70717476ba2f38527a646c606 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 990.805365] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 841135c70717476ba2f38527a646c606 [ 990.969980] env[62096]: ERROR nova.compute.manager [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c9a90464-f5be-447e-84d8-be11d5cc35ae, please check neutron logs for more information. 
[ 990.969980] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 990.969980] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 990.969980] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 990.969980] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 990.969980] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 990.969980] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 990.969980] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 990.969980] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 990.969980] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 990.969980] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 990.969980] env[62096]: ERROR nova.compute.manager raise self.value [ 990.969980] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 990.969980] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 990.969980] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 990.969980] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 990.970598] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 990.970598] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 990.970598] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c9a90464-f5be-447e-84d8-be11d5cc35ae, please check neutron logs for more information. 
[ 990.970598] env[62096]: ERROR nova.compute.manager [ 990.970598] env[62096]: Traceback (most recent call last): [ 990.970598] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 990.970598] env[62096]: listener.cb(fileno) [ 990.970598] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 990.970598] env[62096]: result = function(*args, **kwargs) [ 990.970598] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 990.970598] env[62096]: return func(*args, **kwargs) [ 990.970598] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 990.970598] env[62096]: raise e [ 990.970598] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 990.970598] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 990.970598] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 990.970598] env[62096]: created_port_ids = self._update_ports_for_instance( [ 990.970598] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 990.970598] env[62096]: with excutils.save_and_reraise_exception(): [ 990.970598] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 990.970598] env[62096]: self.force_reraise() [ 990.970598] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 990.970598] env[62096]: raise self.value [ 990.970598] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 990.970598] env[62096]: updated_port = self._update_port( [ 990.970598] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 990.970598] env[62096]: _ensure_no_port_binding_failure(port) [ 990.970598] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 990.970598] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 990.971558] env[62096]: nova.exception.PortBindingFailed: Binding failed for port c9a90464-f5be-447e-84d8-be11d5cc35ae, please check neutron logs for more information. [ 990.971558] env[62096]: Removing descriptor: 14 [ 990.971558] env[62096]: ERROR nova.compute.manager [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c9a90464-f5be-447e-84d8-be11d5cc35ae, please check neutron logs for more information. 
[ 990.971558] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] Traceback (most recent call last): [ 990.971558] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 990.971558] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] yield resources [ 990.971558] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 990.971558] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] self.driver.spawn(context, instance, image_meta, [ 990.971558] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 990.971558] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] self._vmops.spawn(context, instance, image_meta, injected_files, [ 990.971558] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 990.971558] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] vm_ref = self.build_virtual_machine(instance, [ 990.971994] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 990.971994] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] vif_infos = vmwarevif.get_vif_info(self._session, [ 990.971994] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 990.971994] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] for vif in network_info: [ 990.971994] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 990.971994] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] return self._sync_wrapper(fn, *args, **kwargs) [ 990.971994] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 990.971994] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] self.wait() [ 990.971994] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 990.971994] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] self[:] = self._gt.wait() [ 990.971994] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 990.971994] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] return self._exit_event.wait() [ 990.971994] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 990.972446] env[62096]: ERROR 
nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] result = hub.switch() [ 990.972446] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 990.972446] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] return self.greenlet.switch() [ 990.972446] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 990.972446] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] result = function(*args, **kwargs) [ 990.972446] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 990.972446] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] return func(*args, **kwargs) [ 990.972446] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 990.972446] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] raise e [ 990.972446] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 990.972446] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] nwinfo = self.network_api.allocate_for_instance( [ 990.972446] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 990.972446] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] created_port_ids = self._update_ports_for_instance( [ 990.972892] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 990.972892] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] with excutils.save_and_reraise_exception(): [ 990.972892] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 990.972892] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] self.force_reraise() [ 990.972892] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 990.972892] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] raise self.value [ 990.972892] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 990.972892] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] updated_port = self._update_port( [ 990.972892] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 990.972892] 
env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] _ensure_no_port_binding_failure(port) [ 990.972892] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 990.972892] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] raise exception.PortBindingFailed(port_id=port['id']) [ 990.973311] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] nova.exception.PortBindingFailed: Binding failed for port c9a90464-f5be-447e-84d8-be11d5cc35ae, please check neutron logs for more information. [ 990.973311] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] [ 990.973311] env[62096]: INFO nova.compute.manager [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] Terminating instance [ 990.973417] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Acquiring lock "refresh_cache-3266e07d-a297-49e4-945c-a4da92b81d14" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 991.072610] env[62096]: DEBUG oslo_concurrency.lockutils [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.121s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.073166] env[62096]: DEBUG nova.compute.manager [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] Start building networks asynchronously for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 991.074958] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 35d59c98270941afb3c76cac80b84f7e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 991.112713] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 35d59c98270941afb3c76cac80b84f7e [ 991.315529] env[62096]: DEBUG nova.network.neutron [req-f6027ade-7d7d-4a24-a33a-d202c8a8d82c req-926e8e21-2641-4a56-b9c8-d9a35df96de1 service nova] [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 991.399744] env[62096]: DEBUG nova.network.neutron [req-f6027ade-7d7d-4a24-a33a-d202c8a8d82c req-926e8e21-2641-4a56-b9c8-d9a35df96de1 service nova] [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 991.400300] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-f6027ade-7d7d-4a24-a33a-d202c8a8d82c req-926e8e21-2641-4a56-b9c8-d9a35df96de1 service nova] Expecting reply to msg 6f3f6c576c644401b9252b4aff7156e6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 991.409729] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6f3f6c576c644401b9252b4aff7156e6 [ 991.586487] env[62096]: DEBUG nova.compute.utils [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 991.587112] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 0ae343e776114d9face70fd079c17221 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 991.588624] env[62096]: DEBUG nova.compute.manager [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 991.588796] env[62096]: DEBUG nova.network.neutron [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 991.597632] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0ae343e776114d9face70fd079c17221 [ 991.627908] env[62096]: DEBUG nova.policy [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '09a4673f033c4d139efe4cd9ba4b7560', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd81a5a37184d4b29ad6df7e77dfd3ee4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 991.865447] env[62096]: DEBUG nova.network.neutron [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] Successfully created port: db55597d-723e-430a-b215-c9cfa9baa850 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 991.902271] env[62096]: DEBUG oslo_concurrency.lockutils [req-f6027ade-7d7d-4a24-a33a-d202c8a8d82c req-926e8e21-2641-4a56-b9c8-d9a35df96de1 service nova] Releasing lock 
"refresh_cache-3266e07d-a297-49e4-945c-a4da92b81d14" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 991.902775] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Acquired lock "refresh_cache-3266e07d-a297-49e4-945c-a4da92b81d14" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.902971] env[62096]: DEBUG nova.network.neutron [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 991.903413] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg ae6220dc609947e7b9c3911bc78f7f08 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 991.912714] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ae6220dc609947e7b9c3911bc78f7f08 [ 992.089251] env[62096]: DEBUG nova.compute.manager [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] Start building block device mappings for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 992.091246] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 635ae0a3824b483b97304fdc2eb927e6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 992.130644] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 635ae0a3824b483b97304fdc2eb927e6 [ 992.435943] env[62096]: DEBUG nova.network.neutron [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 992.514771] env[62096]: DEBUG nova.network.neutron [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.514771] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 51ba181aa0da4c639392b17b39242c38 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 992.522184] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 51ba181aa0da4c639392b17b39242c38 [ 992.597387] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 88dd2ef512e741918686617b9be97197 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 992.599476] env[62096]: ERROR nova.compute.manager [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port db55597d-723e-430a-b215-c9cfa9baa850, please check neutron logs for more information. [ 992.599476] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 992.599476] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 992.599476] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 992.599476] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 992.599476] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 992.599476] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 992.599476] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 992.599476] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 992.599476] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 992.599476] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 992.599476] env[62096]: ERROR nova.compute.manager raise self.value [ 992.599476] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 992.599476] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 992.599476] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 992.599476] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 992.600057] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 992.600057] env[62096]: ERROR nova.compute.manager raise 
exception.PortBindingFailed(port_id=port['id']) [ 992.600057] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port db55597d-723e-430a-b215-c9cfa9baa850, please check neutron logs for more information. [ 992.600057] env[62096]: ERROR nova.compute.manager [ 992.600389] env[62096]: Traceback (most recent call last): [ 992.600489] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 992.600489] env[62096]: listener.cb(fileno) [ 992.600582] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 992.600582] env[62096]: result = function(*args, **kwargs) [ 992.600664] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 992.600664] env[62096]: return func(*args, **kwargs) [ 992.600741] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 992.600741] env[62096]: raise e [ 992.600816] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 992.600816] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 992.600892] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 992.600892] env[62096]: created_port_ids = self._update_ports_for_instance( [ 992.600971] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 992.600971] env[62096]: with excutils.save_and_reraise_exception(): [ 992.601047] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 992.601047] env[62096]: self.force_reraise() [ 992.601121] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 992.601121] env[62096]: raise self.value [ 992.601201] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 992.601201] env[62096]: updated_port = self._update_port( [ 992.601277] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 992.601277] env[62096]: _ensure_no_port_binding_failure(port) [ 992.601355] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 992.601355] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 992.601437] env[62096]: nova.exception.PortBindingFailed: Binding failed for port db55597d-723e-430a-b215-c9cfa9baa850, please check neutron logs for more information. 
[ 992.601493] env[62096]: Removing descriptor: 14 [ 992.629214] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 88dd2ef512e741918686617b9be97197 [ 992.835700] env[62096]: DEBUG nova.compute.manager [req-dfd94e92-509a-4dab-a2ae-cf0926899a7b req-046b7d18-6432-48e6-847f-56d6d9949ba0 service nova] [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] Received event network-vif-deleted-c9a90464-f5be-447e-84d8-be11d5cc35ae {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 992.836090] env[62096]: DEBUG nova.compute.manager [req-dfd94e92-509a-4dab-a2ae-cf0926899a7b req-046b7d18-6432-48e6-847f-56d6d9949ba0 service nova] [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] Received event network-changed-db55597d-723e-430a-b215-c9cfa9baa850 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 992.836583] env[62096]: DEBUG nova.compute.manager [req-dfd94e92-509a-4dab-a2ae-cf0926899a7b req-046b7d18-6432-48e6-847f-56d6d9949ba0 service nova] [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] Refreshing instance network info cache due to event network-changed-db55597d-723e-430a-b215-c9cfa9baa850. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 992.836972] env[62096]: DEBUG oslo_concurrency.lockutils [req-dfd94e92-509a-4dab-a2ae-cf0926899a7b req-046b7d18-6432-48e6-847f-56d6d9949ba0 service nova] Acquiring lock "refresh_cache-82731039-b916-47bb-b00f-b7f7785d9fc3" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 992.837249] env[62096]: DEBUG oslo_concurrency.lockutils [req-dfd94e92-509a-4dab-a2ae-cf0926899a7b req-046b7d18-6432-48e6-847f-56d6d9949ba0 service nova] Acquired lock "refresh_cache-82731039-b916-47bb-b00f-b7f7785d9fc3" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 992.837533] env[62096]: DEBUG nova.network.neutron [req-dfd94e92-509a-4dab-a2ae-cf0926899a7b req-046b7d18-6432-48e6-847f-56d6d9949ba0 service nova] [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] Refreshing network info cache for port db55597d-723e-430a-b215-c9cfa9baa850 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 992.838057] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-dfd94e92-509a-4dab-a2ae-cf0926899a7b req-046b7d18-6432-48e6-847f-56d6d9949ba0 service nova] Expecting reply to msg 6da491fb0c2e4f999df0624812047741 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 992.844894] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6da491fb0c2e4f999df0624812047741 [ 993.016677] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Releasing lock "refresh_cache-3266e07d-a297-49e4-945c-a4da92b81d14" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 993.017342] env[62096]: DEBUG nova.compute.manager [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 993.017679] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 993.018096] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-49e1b126-f6dd-49d8-aeab-c34169114a46 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.028770] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ee9b78f-9f07-4dc8-9394-591dcab6b6f3 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.049245] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3266e07d-a297-49e4-945c-a4da92b81d14 could not be found. [ 993.049430] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 993.049608] env[62096]: INFO nova.compute.manager [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] Took 0.03 seconds to destroy the instance on the hypervisor. [ 993.049848] env[62096]: DEBUG oslo.service.loopingcall [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 993.050063] env[62096]: DEBUG nova.compute.manager [-] [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 993.050158] env[62096]: DEBUG nova.network.neutron [-] [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 993.063892] env[62096]: DEBUG nova.network.neutron [-] [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 993.064393] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 95ed48368bc544b88bc942de0fe207e7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 993.071994] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 95ed48368bc544b88bc942de0fe207e7 [ 993.103817] env[62096]: DEBUG nova.compute.manager [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] Start spawning the instance on the hypervisor. {{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 993.129624] env[62096]: DEBUG nova.virt.hardware [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 993.130721] env[62096]: DEBUG nova.virt.hardware [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 993.130920] env[62096]: DEBUG nova.virt.hardware [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 993.131136] env[62096]: DEBUG nova.virt.hardware [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 993.131302] env[62096]: DEBUG nova.virt.hardware [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 993.131449] env[62096]: DEBUG nova.virt.hardware [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 993.131654] env[62096]: DEBUG nova.virt.hardware [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 993.131810] env[62096]: DEBUG nova.virt.hardware [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 993.131970] env[62096]: DEBUG nova.virt.hardware [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 993.132155] env[62096]: DEBUG nova.virt.hardware [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 993.132326] env[62096]: DEBUG nova.virt.hardware [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 993.133155] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb00b828-8ffe-41a0-a12f-fb9974a0ef6b {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.140596] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d34b4a40-cbb9-46b8-8a2a-2540145c69e0 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.153791] env[62096]: ERROR nova.compute.manager [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port db55597d-723e-430a-b215-c9cfa9baa850, please check neutron logs for more information. 
[ 993.153791] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] Traceback (most recent call last): [ 993.153791] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 993.153791] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] yield resources [ 993.153791] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 993.153791] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] self.driver.spawn(context, instance, image_meta, [ 993.153791] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 993.153791] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 993.153791] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 993.153791] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] vm_ref = self.build_virtual_machine(instance, [ 993.153791] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 993.154583] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] vif_infos = vmwarevif.get_vif_info(self._session, [ 993.154583] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 993.154583] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] for vif in network_info: [ 993.154583] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 993.154583] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] return self._sync_wrapper(fn, *args, **kwargs) [ 993.154583] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 993.154583] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] self.wait() [ 993.154583] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 993.154583] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] self[:] = self._gt.wait() [ 993.154583] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 993.154583] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] return self._exit_event.wait() [ 993.154583] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 993.154583] env[62096]: ERROR 
nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] current.throw(*self._exc) [ 993.155047] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 993.155047] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] result = function(*args, **kwargs) [ 993.155047] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 993.155047] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] return func(*args, **kwargs) [ 993.155047] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 993.155047] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] raise e [ 993.155047] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 993.155047] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] nwinfo = self.network_api.allocate_for_instance( [ 993.155047] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 993.155047] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] created_port_ids = self._update_ports_for_instance( [ 993.155047] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 993.155047] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] with excutils.save_and_reraise_exception(): [ 993.155047] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 993.155492] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] self.force_reraise() [ 993.155492] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 993.155492] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] raise self.value [ 993.155492] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 993.155492] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] updated_port = self._update_port( [ 993.155492] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 993.155492] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] _ensure_no_port_binding_failure(port) [ 993.155492] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
993.155492] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] raise exception.PortBindingFailed(port_id=port['id']) [ 993.155492] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] nova.exception.PortBindingFailed: Binding failed for port db55597d-723e-430a-b215-c9cfa9baa850, please check neutron logs for more information. [ 993.155492] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] [ 993.155492] env[62096]: INFO nova.compute.manager [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] Terminating instance [ 993.156033] env[62096]: DEBUG oslo_concurrency.lockutils [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Acquiring lock "refresh_cache-82731039-b916-47bb-b00f-b7f7785d9fc3" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 993.354137] env[62096]: DEBUG nova.network.neutron [req-dfd94e92-509a-4dab-a2ae-cf0926899a7b req-046b7d18-6432-48e6-847f-56d6d9949ba0 service nova] [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 993.428861] env[62096]: DEBUG nova.network.neutron [req-dfd94e92-509a-4dab-a2ae-cf0926899a7b req-046b7d18-6432-48e6-847f-56d6d9949ba0 service nova] [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 993.429435] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-dfd94e92-509a-4dab-a2ae-cf0926899a7b req-046b7d18-6432-48e6-847f-56d6d9949ba0 service nova] Expecting reply to msg 5ee03fc252ad464c83b1f90a4bb405d0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 993.439305] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ee03fc252ad464c83b1f90a4bb405d0 [ 993.566097] env[62096]: DEBUG nova.network.neutron [-] [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 993.567008] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 41ab6354e802430abac20480264c2eb8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 993.575382] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 41ab6354e802430abac20480264c2eb8 [ 993.932385] env[62096]: DEBUG oslo_concurrency.lockutils [req-dfd94e92-509a-4dab-a2ae-cf0926899a7b req-046b7d18-6432-48e6-847f-56d6d9949ba0 service nova] Releasing lock "refresh_cache-82731039-b916-47bb-b00f-b7f7785d9fc3" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 993.932631] env[62096]: DEBUG nova.compute.manager [req-dfd94e92-509a-4dab-a2ae-cf0926899a7b req-046b7d18-6432-48e6-847f-56d6d9949ba0 service nova] [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] Received event network-vif-deleted-db55597d-723e-430a-b215-c9cfa9baa850 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 993.932964] env[62096]: DEBUG oslo_concurrency.lockutils [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 
tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Acquired lock "refresh_cache-82731039-b916-47bb-b00f-b7f7785d9fc3" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.933136] env[62096]: DEBUG nova.network.neutron [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 993.933580] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 27b1b59e465d4eeb9ffc1be6c6983149 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 993.940642] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 27b1b59e465d4eeb9ffc1be6c6983149 [ 994.069408] env[62096]: INFO nova.compute.manager [-] [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] Took 1.02 seconds to deallocate network for instance. [ 994.071726] env[62096]: DEBUG nova.compute.claims [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 994.071909] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.072178] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 994.073996] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 8df3096693b948d1b7236e7635f0002b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 994.111913] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8df3096693b948d1b7236e7635f0002b [ 994.450768] env[62096]: DEBUG nova.network.neutron [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 994.520407] env[62096]: DEBUG nova.network.neutron [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.520934] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 46f78762ef4c45ce9637801705b11528 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 994.529267] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 46f78762ef4c45ce9637801705b11528 [ 994.616196] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c20dcac-f4ea-4048-a8f6-ff5adf7c4545 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.623858] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b03a9bcf-4a0b-4687-ad3a-547bc0bd10d7 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.651756] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59b0684b-5567-4230-a0d3-042b5e3142ff {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.658352] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aaf4aee-6976-47d8-8d80-2a7ee99e2d20 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.670899] env[62096]: DEBUG nova.compute.provider_tree [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 994.671374] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 8bf0bee577f94c479e713116b49f0ff1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 994.678644] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8bf0bee577f94c479e713116b49f0ff1 [ 995.023115] env[62096]: DEBUG oslo_concurrency.lockutils [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Releasing lock "refresh_cache-82731039-b916-47bb-b00f-b7f7785d9fc3" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 995.023559] env[62096]: DEBUG nova.compute.manager [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 995.023758] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 995.024187] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e456978b-47fa-4a68-963a-4d1614a142e6 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.033195] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-827b1bed-53e4-492b-a6f1-c2b343e7143c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.053297] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 82731039-b916-47bb-b00f-b7f7785d9fc3 could not be found. [ 995.053497] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 995.053674] env[62096]: INFO nova.compute.manager [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] Took 0.03 seconds to destroy the instance on the hypervisor. [ 995.053904] env[62096]: DEBUG oslo.service.loopingcall [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 995.054129] env[62096]: DEBUG nova.compute.manager [-] [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 995.054219] env[62096]: DEBUG nova.network.neutron [-] [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 995.067771] env[62096]: DEBUG nova.network.neutron [-] [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 995.068203] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 16f0c87d65e04430bfac3f9b6155c9bf in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 995.075020] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 16f0c87d65e04430bfac3f9b6155c9bf [ 995.173943] env[62096]: DEBUG nova.scheduler.client.report [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 995.176244] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 93593901828f4eb0b9ce12c2199e70c0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 995.187264] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 93593901828f4eb0b9ce12c2199e70c0 [ 995.570371] env[62096]: DEBUG nova.network.neutron [-] [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 995.570857] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg bb586ab4715e41c88587465488c94c85 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 995.578937] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bb586ab4715e41c88587465488c94c85 [ 995.679209] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.607s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 995.680090] env[62096]: ERROR nova.compute.manager [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c9a90464-f5be-447e-84d8-be11d5cc35ae, please check neutron logs for more information. 
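Editor's note on the traceback that follows: the frames through excutils.save_and_reraise_exception(), force_reraise() and "raise self.value" come from oslo.utils' context manager, which lets the port-allocation code roll back already-created ports while still re-raising the original PortBindingFailed. A minimal usage sketch is below (requires oslo.utils); _update_port() here is a placeholder for this example, not Nova's method.

```python
# Minimal usage sketch of oslo_utils.excutils.save_and_reraise_exception(),
# the context manager behind the force_reraise()/"raise self.value" frames
# in these tracebacks. _update_port() is a placeholder for the example.
from oslo_utils import excutils


def _update_port():
    raise RuntimeError("binding failed")  # stand-in for PortBindingFailed


def update_ports_for_instance():
    try:
        _update_port()
    except Exception:
        with excutils.save_and_reraise_exception():
            # Cleanup runs here; when the block exits, the context manager
            # re-raises the saved exception, which is why the traceback
            # passes through excutils before reaching the original error.
            print("rolling back ports that were already created")


if __name__ == '__main__':
    try:
        update_ports_for_instance()
    except RuntimeError as exc:
        print("original exception preserved:", exc)
```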
[ 995.680090] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] Traceback (most recent call last): [ 995.680090] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 995.680090] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] self.driver.spawn(context, instance, image_meta, [ 995.680090] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 995.680090] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] self._vmops.spawn(context, instance, image_meta, injected_files, [ 995.680090] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 995.680090] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] vm_ref = self.build_virtual_machine(instance, [ 995.680090] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 995.680090] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] vif_infos = vmwarevif.get_vif_info(self._session, [ 995.680090] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 995.680512] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] for vif in network_info: [ 995.680512] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 995.680512] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] return self._sync_wrapper(fn, *args, **kwargs) [ 995.680512] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 995.680512] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] self.wait() [ 995.680512] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 995.680512] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] self[:] = self._gt.wait() [ 995.680512] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 995.680512] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] return self._exit_event.wait() [ 995.680512] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 995.680512] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] result = hub.switch() [ 995.680512] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
995.680512] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] return self.greenlet.switch() [ 995.680874] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 995.680874] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] result = function(*args, **kwargs) [ 995.680874] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 995.680874] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] return func(*args, **kwargs) [ 995.680874] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 995.680874] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] raise e [ 995.680874] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 995.680874] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] nwinfo = self.network_api.allocate_for_instance( [ 995.680874] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 995.680874] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] created_port_ids = self._update_ports_for_instance( [ 995.680874] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 995.680874] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] with excutils.save_and_reraise_exception(): [ 995.680874] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 995.681220] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] self.force_reraise() [ 995.681220] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 995.681220] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] raise self.value [ 995.681220] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 995.681220] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] updated_port = self._update_port( [ 995.681220] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 995.681220] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] _ensure_no_port_binding_failure(port) [ 995.681220] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 995.681220] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] raise exception.PortBindingFailed(port_id=port['id']) [ 995.681220] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] nova.exception.PortBindingFailed: Binding failed for port c9a90464-f5be-447e-84d8-be11d5cc35ae, please check neutron logs for more information. [ 995.681220] env[62096]: ERROR nova.compute.manager [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] [ 995.681527] env[62096]: DEBUG nova.compute.utils [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] Binding failed for port c9a90464-f5be-447e-84d8-be11d5cc35ae, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 995.683096] env[62096]: DEBUG nova.compute.manager [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] Build of instance 3266e07d-a297-49e4-945c-a4da92b81d14 was re-scheduled: Binding failed for port c9a90464-f5be-447e-84d8-be11d5cc35ae, please check neutron logs for more information. {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 995.683569] env[62096]: DEBUG nova.compute.manager [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 995.683803] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Acquiring lock "refresh_cache-3266e07d-a297-49e4-945c-a4da92b81d14" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 995.683949] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Acquired lock "refresh_cache-3266e07d-a297-49e4-945c-a4da92b81d14" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 995.684179] env[62096]: DEBUG nova.network.neutron [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 995.684597] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 33a3a7a9a4ea48a89887b823db042b8c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 995.691464] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 33a3a7a9a4ea48a89887b823db042b8c [ 996.073236] env[62096]: INFO nova.compute.manager [-] [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] Took 1.02 seconds to deallocate 
network for instance. [ 996.075551] env[62096]: DEBUG nova.compute.claims [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 996.075716] env[62096]: DEBUG oslo_concurrency.lockutils [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 996.075927] env[62096]: DEBUG oslo_concurrency.lockutils [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 996.077723] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg f73915fdc4fc42bba0f92d45fee11733 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 996.110873] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f73915fdc4fc42bba0f92d45fee11733 [ 996.201236] env[62096]: DEBUG nova.network.neutron [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 996.267401] env[62096]: DEBUG nova.network.neutron [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 996.267895] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 7520ea77302d42998f87f47dfd35ac47 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 996.275936] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7520ea77302d42998f87f47dfd35ac47 [ 996.617474] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d9da683-adc7-4ef8-95c1-0fe84cb437b0 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.624398] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e2dfb3b-e2f1-4ee8-a21e-c5a3e9e08755 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.653978] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-113cdec4-697f-4d3c-95b4-dc6c5450dd54 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.660933] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-050c08fa-f05a-4e24-9a2a-c21539bc4db5 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.673514] env[62096]: DEBUG nova.compute.provider_tree [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 996.673982] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg d1fb8740ea304de093384ab0e1ac2310 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 996.681990] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d1fb8740ea304de093384ab0e1ac2310 [ 996.770805] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Releasing lock "refresh_cache-3266e07d-a297-49e4-945c-a4da92b81d14" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 996.771040] env[62096]: DEBUG nova.compute.manager [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 996.771198] env[62096]: DEBUG nova.compute.manager [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 996.771362] env[62096]: DEBUG nova.network.neutron [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 996.785505] env[62096]: DEBUG nova.network.neutron [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 996.786035] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 63478e9c7d1b4e2099d93b70305912be in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 996.792162] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 63478e9c7d1b4e2099d93b70305912be [ 997.176588] env[62096]: DEBUG nova.scheduler.client.report [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 997.178793] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg bcd94a884a674208816bb0bc3f8de21a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 997.189379] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bcd94a884a674208816bb0bc3f8de21a [ 997.288217] env[62096]: DEBUG nova.network.neutron [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 997.288685] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 6f4b70cdbbc1480fbb640358418ed2cf in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 997.296248] env[62096]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6f4b70cdbbc1480fbb640358418ed2cf [ 997.681676] env[62096]: DEBUG oslo_concurrency.lockutils [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.606s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 997.682342] env[62096]: ERROR nova.compute.manager [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port db55597d-723e-430a-b215-c9cfa9baa850, please check neutron logs for more information. [ 997.682342] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] Traceback (most recent call last): [ 997.682342] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 997.682342] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] self.driver.spawn(context, instance, image_meta, [ 997.682342] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 997.682342] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 997.682342] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 997.682342] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] vm_ref = self.build_virtual_machine(instance, [ 997.682342] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 997.682342] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] vif_infos = vmwarevif.get_vif_info(self._session, [ 997.682342] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 997.682870] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] for vif in network_info: [ 997.682870] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 997.682870] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] return self._sync_wrapper(fn, *args, **kwargs) [ 997.682870] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 997.682870] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] self.wait() [ 997.682870] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 997.682870] env[62096]: ERROR nova.compute.manager 
[instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] self[:] = self._gt.wait() [ 997.682870] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 997.682870] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] return self._exit_event.wait() [ 997.682870] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 997.682870] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] current.throw(*self._exc) [ 997.682870] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 997.682870] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] result = function(*args, **kwargs) [ 997.683458] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 997.683458] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] return func(*args, **kwargs) [ 997.683458] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 997.683458] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] raise e [ 997.683458] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 997.683458] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] nwinfo = self.network_api.allocate_for_instance( [ 997.683458] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 997.683458] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] created_port_ids = self._update_ports_for_instance( [ 997.683458] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 997.683458] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] with excutils.save_and_reraise_exception(): [ 997.683458] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 997.683458] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] self.force_reraise() [ 997.683458] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 997.684090] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] raise self.value [ 997.684090] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 997.684090] 
env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] updated_port = self._update_port( [ 997.684090] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 997.684090] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] _ensure_no_port_binding_failure(port) [ 997.684090] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 997.684090] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] raise exception.PortBindingFailed(port_id=port['id']) [ 997.684090] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] nova.exception.PortBindingFailed: Binding failed for port db55597d-723e-430a-b215-c9cfa9baa850, please check neutron logs for more information. [ 997.684090] env[62096]: ERROR nova.compute.manager [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] [ 997.684090] env[62096]: DEBUG nova.compute.utils [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] Binding failed for port db55597d-723e-430a-b215-c9cfa9baa850, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 997.684719] env[62096]: DEBUG nova.compute.manager [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] Build of instance 82731039-b916-47bb-b00f-b7f7785d9fc3 was re-scheduled: Binding failed for port db55597d-723e-430a-b215-c9cfa9baa850, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 997.685147] env[62096]: DEBUG nova.compute.manager [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 997.685385] env[62096]: DEBUG oslo_concurrency.lockutils [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Acquiring lock "refresh_cache-82731039-b916-47bb-b00f-b7f7785d9fc3" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 997.685529] env[62096]: DEBUG oslo_concurrency.lockutils [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Acquired lock "refresh_cache-82731039-b916-47bb-b00f-b7f7785d9fc3" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.685684] env[62096]: DEBUG nova.network.neutron [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 997.686089] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg bb917c9f8aaa4051938c792dad20b581 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 997.692964] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bb917c9f8aaa4051938c792dad20b581 [ 997.791664] env[62096]: INFO nova.compute.manager [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: 3266e07d-a297-49e4-945c-a4da92b81d14] Took 1.02 seconds to deallocate network for instance. [ 997.793366] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 9ff4d6b1a11c4ae38e9a6047fa2660e0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 997.824833] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9ff4d6b1a11c4ae38e9a6047fa2660e0 [ 998.202839] env[62096]: DEBUG nova.network.neutron [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 998.269076] env[62096]: DEBUG nova.network.neutron [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 998.269591] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 10e48a24106b4252bdea1d4e89b2c2d7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 998.277669] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 10e48a24106b4252bdea1d4e89b2c2d7 [ 998.297630] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 5e529b110bbe473e9b0bfb7e7ebc6c4c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 998.324720] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5e529b110bbe473e9b0bfb7e7ebc6c4c [ 998.771579] env[62096]: DEBUG oslo_concurrency.lockutils [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Releasing lock "refresh_cache-82731039-b916-47bb-b00f-b7f7785d9fc3" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 998.771812] env[62096]: DEBUG nova.compute.manager [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 998.771995] env[62096]: DEBUG nova.compute.manager [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 998.772181] env[62096]: DEBUG nova.network.neutron [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 998.786329] env[62096]: DEBUG nova.network.neutron [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 998.786855] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg bc0911f6e133440197f1b6940710c49e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 998.793385] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bc0911f6e133440197f1b6940710c49e [ 998.816645] env[62096]: INFO nova.scheduler.client.report [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Deleted allocations for instance 3266e07d-a297-49e4-945c-a4da92b81d14 [ 998.822236] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg e89687df690e47298cd2a1b917d3def4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 998.834270] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e89687df690e47298cd2a1b917d3def4 [ 999.289883] env[62096]: DEBUG nova.network.neutron [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 999.290400] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg 444664aaab1443ad9653c49e4ea86629 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 999.299062] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 444664aaab1443ad9653c49e4ea86629 [ 999.323781] env[62096]: DEBUG oslo_concurrency.lockutils [None req-4c152c89-0e02-4195-99d0-8f9cd162e35b tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Lock "3266e07d-a297-49e4-945c-a4da92b81d14" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.808s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 999.792960] env[62096]: INFO nova.compute.manager [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] [instance: 82731039-b916-47bb-b00f-b7f7785d9fc3] Took 1.02 seconds to deallocate network for instance. 
[ 999.794840] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg af41e95260974f39960b36f985575efa in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 999.832774] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg af41e95260974f39960b36f985575efa [ 1000.299244] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg facc51095f9a4e048e101b1c582737d3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1000.329266] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg facc51095f9a4e048e101b1c582737d3 [ 1000.818761] env[62096]: INFO nova.scheduler.client.report [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Deleted allocations for instance 82731039-b916-47bb-b00f-b7f7785d9fc3 [ 1000.824471] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Expecting reply to msg ecad2632e35245d88d9716683023a65e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1000.838537] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ecad2632e35245d88d9716683023a65e [ 1000.958587] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Acquiring lock "ff80d910-cc71-4a08-bd84-4d3a87cfaee2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1000.958816] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Lock "ff80d910-cc71-4a08-bd84-4d3a87cfaee2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1000.959550] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 207be3d1f79f43c786178c7bc7a5aa74 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1000.968922] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 207be3d1f79f43c786178c7bc7a5aa74 [ 1001.326075] env[62096]: DEBUG oslo_concurrency.lockutils [None req-8b6dbe28-b974-4383-b12f-9100c9a10226 tempest-ServersTestJSON-796182065 tempest-ServersTestJSON-796182065-project-member] Lock "82731039-b916-47bb-b00f-b7f7785d9fc3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.397s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1001.461941] env[62096]: DEBUG nova.compute.manager [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 
tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1001.464303] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg e86605f02e9e453f84a1a45fb3680bff in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1001.496608] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e86605f02e9e453f84a1a45fb3680bff [ 1001.983078] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.983373] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.984887] env[62096]: INFO nova.compute.claims [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1001.986428] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg af3c58de1382491a9bf416337bb17e5f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1002.024158] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg af3c58de1382491a9bf416337bb17e5f [ 1002.504522] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg f24ed83588894599b4d8119361c4802a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1002.512683] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f24ed83588894599b4d8119361c4802a [ 1003.039923] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78c9b1ce-837a-4b85-9164-fc56e8c62329 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.048686] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35e8cbbc-d59c-4dbe-8e06-e96a1ef4f4f5 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.078537] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8866ba3e-f3ca-4b77-bb46-832fa39b0315 {{(pid=62096) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.086004] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b84587f4-f0fe-480c-abd1-cf13bdabafe1 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.099044] env[62096]: DEBUG nova.compute.provider_tree [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1003.099561] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 419f8e62683f4c37a53ec66ada28c477 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1003.109214] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 419f8e62683f4c37a53ec66ada28c477 [ 1003.603106] env[62096]: DEBUG nova.scheduler.client.report [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1003.605681] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg a128c01c40834326b9afad77cf5e2a28 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1003.618563] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a128c01c40834326b9afad77cf5e2a28 [ 1004.109987] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.126s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1004.110502] env[62096]: DEBUG nova.compute.manager [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1004.112197] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 7b6f4c20ed37464daf73d31911757c7a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1004.150961] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7b6f4c20ed37464daf73d31911757c7a [ 1004.616630] env[62096]: DEBUG nova.compute.utils [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1004.616630] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 720005c053c04bc998f880473fcc8b1a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1004.617668] env[62096]: DEBUG nova.compute.manager [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1004.617668] env[62096]: DEBUG nova.network.neutron [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1004.629576] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 720005c053c04bc998f880473fcc8b1a [ 1004.655923] env[62096]: DEBUG nova.policy [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '491a2069427f43e79347a70e475e4dd2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e45573130e8e4ce4979b37e1b4c5af9f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 1005.089264] env[62096]: DEBUG nova.network.neutron [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] Successfully created port: 6f8d4174-73f4-4c09-ba71-fe3921cddbc6 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1005.121216] env[62096]: DEBUG nova.compute.manager [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1005.123390] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 5432da50e5dc40d89a3b230128797735 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1005.167772] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5432da50e5dc40d89a3b230128797735 [ 1005.629552] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 91d6c6c2279540e1b6eec86a99ee1962 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1005.672318] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 91d6c6c2279540e1b6eec86a99ee1962 [ 1005.828147] env[62096]: DEBUG nova.compute.manager [req-21e67f3c-f501-4494-9159-273bfef54764 req-985244e2-bd2b-4b71-b7d0-502b7b1ebcbc service nova] [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] Received event network-changed-6f8d4174-73f4-4c09-ba71-fe3921cddbc6 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 1005.828147] env[62096]: DEBUG nova.compute.manager [req-21e67f3c-f501-4494-9159-273bfef54764 req-985244e2-bd2b-4b71-b7d0-502b7b1ebcbc service nova] [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] Refreshing instance network info cache due to event network-changed-6f8d4174-73f4-4c09-ba71-fe3921cddbc6. {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 1005.828147] env[62096]: DEBUG oslo_concurrency.lockutils [req-21e67f3c-f501-4494-9159-273bfef54764 req-985244e2-bd2b-4b71-b7d0-502b7b1ebcbc service nova] Acquiring lock "refresh_cache-ff80d910-cc71-4a08-bd84-4d3a87cfaee2" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1005.828147] env[62096]: DEBUG oslo_concurrency.lockutils [req-21e67f3c-f501-4494-9159-273bfef54764 req-985244e2-bd2b-4b71-b7d0-502b7b1ebcbc service nova] Acquired lock "refresh_cache-ff80d910-cc71-4a08-bd84-4d3a87cfaee2" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.828147] env[62096]: DEBUG nova.network.neutron [req-21e67f3c-f501-4494-9159-273bfef54764 req-985244e2-bd2b-4b71-b7d0-502b7b1ebcbc service nova] [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] Refreshing network info cache for port 6f8d4174-73f4-4c09-ba71-fe3921cddbc6 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1005.828632] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-21e67f3c-f501-4494-9159-273bfef54764 req-985244e2-bd2b-4b71-b7d0-502b7b1ebcbc service nova] Expecting reply to msg db2e998508e645e3b7c85767530201fa in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1005.837465] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg db2e998508e645e3b7c85767530201fa [ 1005.998714] env[62096]: ERROR nova.compute.manager [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 6f8d4174-73f4-4c09-ba71-fe3921cddbc6, please check neutron logs for more information. 
[ 1005.998714] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 1005.998714] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1005.998714] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 1005.998714] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 1005.998714] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 1005.998714] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 1005.998714] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 1005.998714] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1005.998714] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 1005.998714] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1005.998714] env[62096]: ERROR nova.compute.manager raise self.value [ 1005.998714] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 1005.998714] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 1005.998714] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1005.998714] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 1005.999241] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1005.999241] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 1005.999241] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 6f8d4174-73f4-4c09-ba71-fe3921cddbc6, please check neutron logs for more information. 
[ 1005.999241] env[62096]: ERROR nova.compute.manager [ 1005.999241] env[62096]: Traceback (most recent call last): [ 1005.999241] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 1005.999241] env[62096]: listener.cb(fileno) [ 1005.999241] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1005.999241] env[62096]: result = function(*args, **kwargs) [ 1005.999241] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 1005.999241] env[62096]: return func(*args, **kwargs) [ 1005.999241] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 1005.999241] env[62096]: raise e [ 1005.999241] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1005.999241] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 1005.999241] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 1005.999241] env[62096]: created_port_ids = self._update_ports_for_instance( [ 1005.999241] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 1005.999241] env[62096]: with excutils.save_and_reraise_exception(): [ 1005.999241] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1005.999241] env[62096]: self.force_reraise() [ 1005.999241] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1005.999241] env[62096]: raise self.value [ 1005.999241] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 1005.999241] env[62096]: updated_port = self._update_port( [ 1005.999241] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1005.999241] env[62096]: _ensure_no_port_binding_failure(port) [ 1005.999241] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1005.999241] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 1006.000238] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 6f8d4174-73f4-4c09-ba71-fe3921cddbc6, please check neutron logs for more information. [ 1006.000238] env[62096]: Removing descriptor: 14 [ 1006.133061] env[62096]: DEBUG nova.compute.manager [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1006.157613] env[62096]: DEBUG nova.virt.hardware [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1006.157852] env[62096]: DEBUG nova.virt.hardware [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1006.158007] env[62096]: DEBUG nova.virt.hardware [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1006.158185] env[62096]: DEBUG nova.virt.hardware [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1006.158328] env[62096]: DEBUG nova.virt.hardware [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1006.158472] env[62096]: DEBUG nova.virt.hardware [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1006.158669] env[62096]: DEBUG nova.virt.hardware [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1006.158822] env[62096]: DEBUG nova.virt.hardware [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1006.158981] env[62096]: DEBUG nova.virt.hardware [None 
req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1006.159190] env[62096]: DEBUG nova.virt.hardware [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1006.159500] env[62096]: DEBUG nova.virt.hardware [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1006.160382] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffbb69b6-0735-46a3-9c3b-e557b1cc0c4e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.168568] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b32f2014-7c5d-4cf6-b3f8-297b8135cffc {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.182984] env[62096]: ERROR nova.compute.manager [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 6f8d4174-73f4-4c09-ba71-fe3921cddbc6, please check neutron logs for more information. 
[ 1006.182984] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] Traceback (most recent call last): [ 1006.182984] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 1006.182984] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] yield resources [ 1006.182984] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1006.182984] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] self.driver.spawn(context, instance, image_meta, [ 1006.182984] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1006.182984] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1006.182984] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1006.182984] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] vm_ref = self.build_virtual_machine(instance, [ 1006.182984] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1006.183376] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] vif_infos = vmwarevif.get_vif_info(self._session, [ 1006.183376] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1006.183376] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] for vif in network_info: [ 1006.183376] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 1006.183376] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] return self._sync_wrapper(fn, *args, **kwargs) [ 1006.183376] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 1006.183376] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] self.wait() [ 1006.183376] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 1006.183376] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] self[:] = self._gt.wait() [ 1006.183376] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1006.183376] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] return self._exit_event.wait() [ 1006.183376] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 
1006.183376] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] current.throw(*self._exc) [ 1006.183843] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1006.183843] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] result = function(*args, **kwargs) [ 1006.183843] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 1006.183843] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] return func(*args, **kwargs) [ 1006.183843] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 1006.183843] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] raise e [ 1006.183843] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1006.183843] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] nwinfo = self.network_api.allocate_for_instance( [ 1006.183843] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 1006.183843] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] created_port_ids = self._update_ports_for_instance( [ 1006.183843] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 1006.183843] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] with excutils.save_and_reraise_exception(): [ 1006.183843] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1006.184361] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] self.force_reraise() [ 1006.184361] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1006.184361] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] raise self.value [ 1006.184361] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 1006.184361] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] updated_port = self._update_port( [ 1006.184361] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1006.184361] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] _ensure_no_port_binding_failure(port) [ 1006.184361] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] File "/opt/stack/nova/nova/network/neutron.py", 
line 294, in _ensure_no_port_binding_failure [ 1006.184361] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] raise exception.PortBindingFailed(port_id=port['id']) [ 1006.184361] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] nova.exception.PortBindingFailed: Binding failed for port 6f8d4174-73f4-4c09-ba71-fe3921cddbc6, please check neutron logs for more information. [ 1006.184361] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] [ 1006.184361] env[62096]: INFO nova.compute.manager [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] Terminating instance [ 1006.185380] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Acquiring lock "refresh_cache-ff80d910-cc71-4a08-bd84-4d3a87cfaee2" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1006.192095] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Acquiring lock "94a8faf3-cbd0-4a08-b93b-2c37bb040afe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1006.192435] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Lock "94a8faf3-cbd0-4a08-b93b-2c37bb040afe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1006.192915] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Expecting reply to msg fbdecdeb66c94e73bc32283a405a4640 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1006.201776] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fbdecdeb66c94e73bc32283a405a4640 [ 1006.347288] env[62096]: DEBUG nova.network.neutron [req-21e67f3c-f501-4494-9159-273bfef54764 req-985244e2-bd2b-4b71-b7d0-502b7b1ebcbc service nova] [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1006.426661] env[62096]: DEBUG nova.network.neutron [req-21e67f3c-f501-4494-9159-273bfef54764 req-985244e2-bd2b-4b71-b7d0-502b7b1ebcbc service nova] [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1006.427170] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-21e67f3c-f501-4494-9159-273bfef54764 req-985244e2-bd2b-4b71-b7d0-502b7b1ebcbc service nova] Expecting reply to msg 4e362453084c44a7bd3706dcdc989e86 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1006.435059] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4e362453084c44a7bd3706dcdc989e86 [ 1006.695430] env[62096]: DEBUG nova.compute.manager [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] Starting instance... {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1006.697113] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Expecting reply to msg 26ab3fb28ff3498eb29bf1f426272e5d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1006.728183] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 26ab3fb28ff3498eb29bf1f426272e5d [ 1006.929511] env[62096]: DEBUG oslo_concurrency.lockutils [req-21e67f3c-f501-4494-9159-273bfef54764 req-985244e2-bd2b-4b71-b7d0-502b7b1ebcbc service nova] Releasing lock "refresh_cache-ff80d910-cc71-4a08-bd84-4d3a87cfaee2" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1006.929975] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Acquired lock "refresh_cache-ff80d910-cc71-4a08-bd84-4d3a87cfaee2" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1006.930115] env[62096]: DEBUG nova.network.neutron [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1006.930559] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 35e86cdf1d7946b59e3c30a6b3c1d969 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1006.937632] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 35e86cdf1d7946b59e3c30a6b3c1d969 [ 1007.217939] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1007.218283] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.219819] env[62096]: INFO nova.compute.claims [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1007.222248] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Expecting reply to msg 60e88a01fa194e249863b4a9a7cefcf5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1007.259319] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 60e88a01fa194e249863b4a9a7cefcf5 [ 1007.448099] env[62096]: DEBUG nova.network.neutron [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1007.514657] env[62096]: DEBUG nova.network.neutron [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1007.515190] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 20a96ab6c0de4515a156202a92ab2f29 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1007.522928] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 20a96ab6c0de4515a156202a92ab2f29 [ 1007.726440] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Expecting reply to msg 4f5efae72d75413fb8363364fb431af9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1007.737683] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4f5efae72d75413fb8363364fb431af9 [ 1007.850746] env[62096]: DEBUG nova.compute.manager [req-5f48342e-27e0-461d-9c15-690a8bae986f req-eb625032-3973-4431-bfcb-784c77ebda19 service nova] [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] Received event network-vif-deleted-6f8d4174-73f4-4c09-ba71-fe3921cddbc6 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 1008.016988] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Releasing lock 
"refresh_cache-ff80d910-cc71-4a08-bd84-4d3a87cfaee2" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1008.017406] env[62096]: DEBUG nova.compute.manager [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1008.017603] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1008.017894] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-82f0fae6-ddca-4a55-a049-13f5bbcedbff {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.026671] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a69348f3-30ae-4159-b75f-aed614f6ea94 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.047114] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ff80d910-cc71-4a08-bd84-4d3a87cfaee2 could not be found. [ 1008.047312] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1008.047486] env[62096]: INFO nova.compute.manager [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] Took 0.03 seconds to destroy the instance on the hypervisor. [ 1008.047717] env[62096]: DEBUG oslo.service.loopingcall [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1008.047917] env[62096]: DEBUG nova.compute.manager [-] [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1008.048021] env[62096]: DEBUG nova.network.neutron [-] [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1008.062649] env[62096]: DEBUG nova.network.neutron [-] [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1008.063100] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 5a7dc54b644a40d78676d214b3e3992e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1008.070674] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5a7dc54b644a40d78676d214b3e3992e [ 1008.267573] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f60a2ab1-3b9b-4bc0-aa1a-b5dd8fac3869 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.274808] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7d69c82-ecf3-470d-97a5-337bb0c631e7 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.304742] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e761a208-41fe-476a-9a88-48ac44a8a450 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.311583] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f32a8c7-211b-4a7b-a441-b70779e27dc7 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.324538] env[62096]: DEBUG nova.compute.provider_tree [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1008.324653] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Expecting reply to msg d76a6b1c0d7941a78d5153c7d3c8cc0b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1008.331698] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d76a6b1c0d7941a78d5153c7d3c8cc0b [ 1008.565081] env[62096]: DEBUG nova.network.neutron [-] [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1008.565564] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c7d09ef0426f4f2baf07b15c79d7dbf0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1008.573461] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c7d09ef0426f4f2baf07b15c79d7dbf0 [ 1008.827056] env[62096]: DEBUG nova.scheduler.client.report [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1008.829446] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Expecting reply to msg bd675087b09848d1a079ac0f0311ae97 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1008.840417] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bd675087b09848d1a079ac0f0311ae97 [ 1009.067798] env[62096]: INFO nova.compute.manager [-] [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] Took 1.02 seconds to deallocate network for instance. [ 1009.070152] env[62096]: DEBUG nova.compute.claims [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1009.070357] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1009.332433] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.114s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1009.333068] env[62096]: DEBUG nova.compute.manager [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1009.335562] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Expecting reply to msg 8d1d4619d97442fd9d9b31c36fbfdcc8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1009.337590] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.266s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1009.339318] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg e1d746c62f2049ff9602d2d8e172eecb in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1009.366334] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8d1d4619d97442fd9d9b31c36fbfdcc8 [ 1009.371202] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e1d746c62f2049ff9602d2d8e172eecb [ 1009.842865] env[62096]: DEBUG nova.compute.utils [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1009.843494] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Expecting reply to msg 9a50d7c49eea44a6b7d647d262c3248a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1009.845411] env[62096]: DEBUG nova.compute.manager [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1009.845697] env[62096]: DEBUG nova.network.neutron [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1009.856123] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9a50d7c49eea44a6b7d647d262c3248a [ 1009.884558] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e1f4981-ad4e-4a08-892c-c18070d1e8cb {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.888640] env[62096]: DEBUG nova.policy [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e052dcac3461468081ac1fc495254750', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '287b0b05cfa24174bc92dba518d6185f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 1009.895570] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f74f9196-2c91-4a8e-82e6-22277f6d75f4 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.925842] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72a8040b-38f1-4453-8ece-af74cf033e27 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.932818] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ec658f8-4993-4097-8997-e3c685ce703a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.945784] env[62096]: DEBUG nova.compute.provider_tree [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1009.946268] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg f0d62ef5d02f467d8251065021c56eec in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1009.953371] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f0d62ef5d02f467d8251065021c56eec [ 1010.145690] env[62096]: DEBUG nova.network.neutron [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] Successfully created port: 
5a4fb4c4-62a4-43ef-ada0-710c6942bbbd {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1010.346239] env[62096]: DEBUG nova.compute.manager [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] Start building block device mappings for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1010.347981] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Expecting reply to msg 08a33491b91245aab4fb6df1502f4883 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1010.378009] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 08a33491b91245aab4fb6df1502f4883 [ 1010.449215] env[62096]: DEBUG nova.scheduler.client.report [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1010.451649] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg dd0fadeb7d77455090fd366c18fbebb0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1010.463403] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dd0fadeb7d77455090fd366c18fbebb0 [ 1010.713450] env[62096]: DEBUG nova.compute.manager [req-03ff9f36-c4f6-401c-b749-469411905519 req-552e4cd6-9499-47d2-b19c-229dd1085240 service nova] [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] Received event network-changed-5a4fb4c4-62a4-43ef-ada0-710c6942bbbd {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 1010.713652] env[62096]: DEBUG nova.compute.manager [req-03ff9f36-c4f6-401c-b749-469411905519 req-552e4cd6-9499-47d2-b19c-229dd1085240 service nova] [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] Refreshing instance network info cache due to event network-changed-5a4fb4c4-62a4-43ef-ada0-710c6942bbbd. 
{{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 1010.713865] env[62096]: DEBUG oslo_concurrency.lockutils [req-03ff9f36-c4f6-401c-b749-469411905519 req-552e4cd6-9499-47d2-b19c-229dd1085240 service nova] Acquiring lock "refresh_cache-94a8faf3-cbd0-4a08-b93b-2c37bb040afe" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1010.714005] env[62096]: DEBUG oslo_concurrency.lockutils [req-03ff9f36-c4f6-401c-b749-469411905519 req-552e4cd6-9499-47d2-b19c-229dd1085240 service nova] Acquired lock "refresh_cache-94a8faf3-cbd0-4a08-b93b-2c37bb040afe" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1010.714167] env[62096]: DEBUG nova.network.neutron [req-03ff9f36-c4f6-401c-b749-469411905519 req-552e4cd6-9499-47d2-b19c-229dd1085240 service nova] [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] Refreshing network info cache for port 5a4fb4c4-62a4-43ef-ada0-710c6942bbbd {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1010.714694] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-03ff9f36-c4f6-401c-b749-469411905519 req-552e4cd6-9499-47d2-b19c-229dd1085240 service nova] Expecting reply to msg bea43cbd31c144dfb52bd37c0f615e4c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1010.721866] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bea43cbd31c144dfb52bd37c0f615e4c [ 1010.853297] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Expecting reply to msg e0db0e2d89ce4ddd932fecc2951d541c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1010.877253] env[62096]: ERROR nova.compute.manager [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 5a4fb4c4-62a4-43ef-ada0-710c6942bbbd, please check neutron logs for more information. 
[ 1010.877253] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 1010.877253] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1010.877253] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 1010.877253] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 1010.877253] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 1010.877253] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 1010.877253] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 1010.877253] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1010.877253] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 1010.877253] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1010.877253] env[62096]: ERROR nova.compute.manager raise self.value [ 1010.877253] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 1010.877253] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 1010.877253] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1010.877253] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 1010.877790] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1010.877790] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 1010.877790] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 5a4fb4c4-62a4-43ef-ada0-710c6942bbbd, please check neutron logs for more information. 
[ 1010.877790] env[62096]: ERROR nova.compute.manager [ 1010.877790] env[62096]: Traceback (most recent call last): [ 1010.877790] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 1010.877790] env[62096]: listener.cb(fileno) [ 1010.877790] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1010.877790] env[62096]: result = function(*args, **kwargs) [ 1010.877790] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 1010.877790] env[62096]: return func(*args, **kwargs) [ 1010.877790] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 1010.877790] env[62096]: raise e [ 1010.877790] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1010.877790] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 1010.877790] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 1010.877790] env[62096]: created_port_ids = self._update_ports_for_instance( [ 1010.877790] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 1010.877790] env[62096]: with excutils.save_and_reraise_exception(): [ 1010.877790] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1010.877790] env[62096]: self.force_reraise() [ 1010.877790] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1010.877790] env[62096]: raise self.value [ 1010.877790] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 1010.877790] env[62096]: updated_port = self._update_port( [ 1010.877790] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1010.877790] env[62096]: _ensure_no_port_binding_failure(port) [ 1010.877790] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1010.877790] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 1010.878821] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 5a4fb4c4-62a4-43ef-ada0-710c6942bbbd, please check neutron logs for more information. [ 1010.878821] env[62096]: Removing descriptor: 14 [ 1010.883233] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e0db0e2d89ce4ddd932fecc2951d541c [ 1010.954514] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.618s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1010.955355] env[62096]: ERROR nova.compute.manager [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 6f8d4174-73f4-4c09-ba71-fe3921cddbc6, please check neutron logs for more information. 
[ 1010.955355] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] Traceback (most recent call last): [ 1010.955355] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1010.955355] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] self.driver.spawn(context, instance, image_meta, [ 1010.955355] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1010.955355] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1010.955355] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1010.955355] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] vm_ref = self.build_virtual_machine(instance, [ 1010.955355] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1010.955355] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] vif_infos = vmwarevif.get_vif_info(self._session, [ 1010.955355] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1010.955781] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] for vif in network_info: [ 1010.955781] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 1010.955781] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] return self._sync_wrapper(fn, *args, **kwargs) [ 1010.955781] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 1010.955781] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] self.wait() [ 1010.955781] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 1010.955781] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] self[:] = self._gt.wait() [ 1010.955781] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1010.955781] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] return self._exit_event.wait() [ 1010.955781] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 1010.955781] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] current.throw(*self._exc) [ 1010.955781] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", 
line 265, in main [ 1010.955781] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] result = function(*args, **kwargs) [ 1010.956183] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 1010.956183] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] return func(*args, **kwargs) [ 1010.956183] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 1010.956183] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] raise e [ 1010.956183] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1010.956183] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] nwinfo = self.network_api.allocate_for_instance( [ 1010.956183] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 1010.956183] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] created_port_ids = self._update_ports_for_instance( [ 1010.956183] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 1010.956183] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] with excutils.save_and_reraise_exception(): [ 1010.956183] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1010.956183] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] self.force_reraise() [ 1010.956183] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1010.956550] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] raise self.value [ 1010.956550] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 1010.956550] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] updated_port = self._update_port( [ 1010.956550] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1010.956550] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] _ensure_no_port_binding_failure(port) [ 1010.956550] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1010.956550] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] raise exception.PortBindingFailed(port_id=port['id']) [ 1010.956550] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] 
nova.exception.PortBindingFailed: Binding failed for port 6f8d4174-73f4-4c09-ba71-fe3921cddbc6, please check neutron logs for more information. [ 1010.956550] env[62096]: ERROR nova.compute.manager [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] [ 1010.956799] env[62096]: DEBUG nova.compute.utils [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] Binding failed for port 6f8d4174-73f4-4c09-ba71-fe3921cddbc6, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1010.958272] env[62096]: DEBUG nova.compute.manager [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] Build of instance ff80d910-cc71-4a08-bd84-4d3a87cfaee2 was re-scheduled: Binding failed for port 6f8d4174-73f4-4c09-ba71-fe3921cddbc6, please check neutron logs for more information. {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 1010.958690] env[62096]: DEBUG nova.compute.manager [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 1010.958916] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Acquiring lock "refresh_cache-ff80d910-cc71-4a08-bd84-4d3a87cfaee2" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1010.959065] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Acquired lock "refresh_cache-ff80d910-cc71-4a08-bd84-4d3a87cfaee2" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1010.959225] env[62096]: DEBUG nova.network.neutron [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1010.959631] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 249aa083befb467282447b9c55105e82 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1010.966083] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 249aa083befb467282447b9c55105e82 [ 1011.230764] env[62096]: DEBUG nova.network.neutron [req-03ff9f36-c4f6-401c-b749-469411905519 req-552e4cd6-9499-47d2-b19c-229dd1085240 service nova] [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1011.299509] env[62096]: DEBUG nova.network.neutron [req-03ff9f36-c4f6-401c-b749-469411905519 req-552e4cd6-9499-47d2-b19c-229dd1085240 service nova] [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1011.300026] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-03ff9f36-c4f6-401c-b749-469411905519 req-552e4cd6-9499-47d2-b19c-229dd1085240 service nova] Expecting reply to msg 995f28b770974a1b9aabb45a3d8e4ccf in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1011.309544] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 995f28b770974a1b9aabb45a3d8e4ccf [ 1011.356651] env[62096]: DEBUG nova.compute.manager [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] Start spawning the instance on the hypervisor. {{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1011.380941] env[62096]: DEBUG nova.virt.hardware [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1011.381182] env[62096]: DEBUG nova.virt.hardware [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1011.381341] env[62096]: DEBUG nova.virt.hardware [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1011.381519] env[62096]: DEBUG nova.virt.hardware [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1011.381662] env[62096]: DEBUG nova.virt.hardware [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1011.381806] 
env[62096]: DEBUG nova.virt.hardware [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1011.382030] env[62096]: DEBUG nova.virt.hardware [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1011.382195] env[62096]: DEBUG nova.virt.hardware [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1011.382358] env[62096]: DEBUG nova.virt.hardware [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1011.382515] env[62096]: DEBUG nova.virt.hardware [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1011.382683] env[62096]: DEBUG nova.virt.hardware [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1011.383643] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f923cf0f-86df-462a-97c5-87c8b15ef8ce {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.391620] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-915cfc68-0e11-43c7-8851-f2efbe964403 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.405741] env[62096]: ERROR nova.compute.manager [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 5a4fb4c4-62a4-43ef-ada0-710c6942bbbd, please check neutron logs for more information. 
[ 1011.405741] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] Traceback (most recent call last): [ 1011.405741] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 1011.405741] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] yield resources [ 1011.405741] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1011.405741] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] self.driver.spawn(context, instance, image_meta, [ 1011.405741] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1011.405741] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1011.405741] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1011.405741] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] vm_ref = self.build_virtual_machine(instance, [ 1011.405741] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1011.406361] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] vif_infos = vmwarevif.get_vif_info(self._session, [ 1011.406361] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1011.406361] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] for vif in network_info: [ 1011.406361] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 1011.406361] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] return self._sync_wrapper(fn, *args, **kwargs) [ 1011.406361] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 1011.406361] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] self.wait() [ 1011.406361] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 1011.406361] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] self[:] = self._gt.wait() [ 1011.406361] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1011.406361] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] return self._exit_event.wait() [ 1011.406361] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 
1011.406361] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] current.throw(*self._exc) [ 1011.406977] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1011.406977] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] result = function(*args, **kwargs) [ 1011.406977] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 1011.406977] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] return func(*args, **kwargs) [ 1011.406977] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 1011.406977] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] raise e [ 1011.406977] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1011.406977] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] nwinfo = self.network_api.allocate_for_instance( [ 1011.406977] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 1011.406977] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] created_port_ids = self._update_ports_for_instance( [ 1011.406977] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 1011.406977] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] with excutils.save_and_reraise_exception(): [ 1011.406977] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1011.407596] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] self.force_reraise() [ 1011.407596] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1011.407596] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] raise self.value [ 1011.407596] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 1011.407596] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] updated_port = self._update_port( [ 1011.407596] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1011.407596] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] _ensure_no_port_binding_failure(port) [ 1011.407596] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] File "/opt/stack/nova/nova/network/neutron.py", 
line 294, in _ensure_no_port_binding_failure [ 1011.407596] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] raise exception.PortBindingFailed(port_id=port['id']) [ 1011.407596] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] nova.exception.PortBindingFailed: Binding failed for port 5a4fb4c4-62a4-43ef-ada0-710c6942bbbd, please check neutron logs for more information. [ 1011.407596] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] [ 1011.407596] env[62096]: INFO nova.compute.manager [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] Terminating instance [ 1011.408304] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Acquiring lock "refresh_cache-94a8faf3-cbd0-4a08-b93b-2c37bb040afe" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1011.476594] env[62096]: DEBUG nova.network.neutron [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1011.543318] env[62096]: DEBUG nova.network.neutron [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1011.543843] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg c40131917a7c495c97547fcad0ae59ee in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1011.552127] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c40131917a7c495c97547fcad0ae59ee [ 1011.802072] env[62096]: DEBUG oslo_concurrency.lockutils [req-03ff9f36-c4f6-401c-b749-469411905519 req-552e4cd6-9499-47d2-b19c-229dd1085240 service nova] Releasing lock "refresh_cache-94a8faf3-cbd0-4a08-b93b-2c37bb040afe" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1011.802574] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Acquired lock "refresh_cache-94a8faf3-cbd0-4a08-b93b-2c37bb040afe" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1011.802763] env[62096]: DEBUG nova.network.neutron [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1011.803207] env[62096]: INFO 
oslo_messaging._drivers.amqpdriver [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Expecting reply to msg 2b217448907a48bf888835f502e43119 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1011.810805] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2b217448907a48bf888835f502e43119 [ 1012.046797] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Releasing lock "refresh_cache-ff80d910-cc71-4a08-bd84-4d3a87cfaee2" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1012.047130] env[62096]: DEBUG nova.compute.manager [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 1012.047394] env[62096]: DEBUG nova.compute.manager [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1012.047631] env[62096]: DEBUG nova.network.neutron [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1012.062484] env[62096]: DEBUG nova.network.neutron [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1012.063001] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg d834770a253744dba87a7bd1499cab71 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1012.071522] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d834770a253744dba87a7bd1499cab71 [ 1012.320828] env[62096]: DEBUG nova.network.neutron [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1012.397150] env[62096]: DEBUG nova.network.neutron [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1012.397685] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Expecting reply to msg ffef553eb2554e4996fd26e7c25829ff in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1012.405954] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ffef553eb2554e4996fd26e7c25829ff [ 1012.565864] env[62096]: DEBUG nova.network.neutron [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1012.566389] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg f620973358334403a85b4ec0d4ee92b9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1012.575102] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f620973358334403a85b4ec0d4ee92b9 [ 1012.755522] env[62096]: DEBUG nova.compute.manager [req-792f45c4-630c-436e-86b4-2adc15f58499 req-05f9b5eb-31ab-49f3-b5f8-a5b4831f15b4 service nova] [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] Received event network-vif-deleted-5a4fb4c4-62a4-43ef-ada0-710c6942bbbd {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 1012.900352] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Releasing lock "refresh_cache-94a8faf3-cbd0-4a08-b93b-2c37bb040afe" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1012.900706] env[62096]: DEBUG nova.compute.manager [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1012.900900] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1012.901199] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1a57e789-8471-45da-94b1-1de6208ce101 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.910488] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd26ae81-1cd4-468e-a913-40584763f00f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.931246] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 94a8faf3-cbd0-4a08-b93b-2c37bb040afe could not be found. [ 1012.931435] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1012.931617] env[62096]: INFO nova.compute.manager [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] Took 0.03 seconds to destroy the instance on the hypervisor. [ 1012.931853] env[62096]: DEBUG oslo.service.loopingcall [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1012.932094] env[62096]: DEBUG nova.compute.manager [-] [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1012.932193] env[62096]: DEBUG nova.network.neutron [-] [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1012.946219] env[62096]: DEBUG nova.network.neutron [-] [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1012.946670] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg f6705fe0b8604327a22b096c44fc9592 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1012.953221] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f6705fe0b8604327a22b096c44fc9592 [ 1013.069371] env[62096]: INFO nova.compute.manager [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] [instance: ff80d910-cc71-4a08-bd84-4d3a87cfaee2] Took 1.02 seconds to deallocate network for instance. [ 1013.071248] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg fdb54d998f29405b8aca5879a7ae6678 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1013.103625] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fdb54d998f29405b8aca5879a7ae6678 [ 1013.448436] env[62096]: DEBUG nova.network.neutron [-] [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1013.448924] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c8ffdf38edc340d0b39303b7a5017e93 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1013.457540] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c8ffdf38edc340d0b39303b7a5017e93 [ 1013.576323] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg 48f82e6c3abb40feafef4aaed112aef2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1013.609314] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 48f82e6c3abb40feafef4aaed112aef2 [ 1013.950993] env[62096]: INFO nova.compute.manager [-] [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] Took 1.02 seconds to deallocate network for instance. 
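
The two failed builds in this trace (instances ff80d910-cc71-4a08-bd84-4d3a87cfaee2 and 94a8faf3-cbd0-4a08-b93b-2c37bb040afe) share one root cause: Neutron creates the requested port but reports its binding as failed, and the check at nova/network/neutron.py line 294 in the tracebacks above (_ensure_no_port_binding_failure) turns that into nova.exception.PortBindingFailed. The exception unwinds _allocate_network_async, the spawn aborts, the compute resource claim is rolled back, and the build is re-scheduled. The Python sketch below is a minimal, self-contained illustration of that check only, not the actual Nova source: the exception class and the port payload are simplified stand-ins, and the 'binding_failed' vif_type is the value Neutron reports when no mechanism driver could bind the port (implied by, but not shown in, this log).

# Illustrative sketch under the assumptions stated above; runs with plain Python 3.
class PortBindingFailed(Exception):
    """Simplified stand-in for nova.exception.PortBindingFailed."""
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs for "
            "more information." % port_id)
        self.port_id = port_id

def ensure_no_port_binding_failure(port):
    # Mirrors the check seen in the traceback: a port whose binding ended up
    # with vif_type 'binding_failed' cannot be plugged, so the build must fail.
    if port.get("binding:vif_type") == "binding_failed":
        raise PortBindingFailed(port["id"])

if __name__ == "__main__":
    # Hypothetical Neutron port payload shaped like the port in this log.
    failed_port = {
        "id": "5a4fb4c4-62a4-43ef-ada0-710c6942bbbd",
        "binding:vif_type": "binding_failed",
    }
    try:
        ensure_no_port_binding_failure(failed_port)
    except PortBindingFailed as exc:
        # In the log, this exception aborts the claim and triggers a
        # re-schedule rather than crashing the compute service.
        print("caught:", exc)
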
[ 1013.953315] env[62096]: DEBUG nova.compute.claims [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1013.953501] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1013.953716] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1013.955522] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Expecting reply to msg 900dd6b043eb447ca2a2763893b1a9cc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1013.986119] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 900dd6b043eb447ca2a2763893b1a9cc [ 1014.099895] env[62096]: INFO nova.scheduler.client.report [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Deleted allocations for instance ff80d910-cc71-4a08-bd84-4d3a87cfaee2 [ 1014.106079] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Expecting reply to msg c780645909be4a39956971865174e1be in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1014.136364] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c780645909be4a39956971865174e1be [ 1014.494990] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb9512b7-c3fb-4705-a855-684631e0467d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.507924] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d3251c3-ebf8-42f6-b872-368ee64af073 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.550594] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3880dc03-bb57-4d8a-80ea-b94cc1803866 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.558338] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47ea9b1a-2ece-4213-9c2e-1b588f993720 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.570996] env[62096]: DEBUG nova.compute.provider_tree [None 
req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1014.571486] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Expecting reply to msg 17c23e888fcb46f48047968994c245f1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1014.580350] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 17c23e888fcb46f48047968994c245f1 [ 1014.608240] env[62096]: DEBUG oslo_concurrency.lockutils [None req-d277bcb2-1cbc-400e-ad31-8d061d7543d6 tempest-DeleteServersTestJSON-654204352 tempest-DeleteServersTestJSON-654204352-project-member] Lock "ff80d910-cc71-4a08-bd84-4d3a87cfaee2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 13.649s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1015.074519] env[62096]: DEBUG nova.scheduler.client.report [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1015.076875] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Expecting reply to msg a5cd256e786c423bba9be617b682fddb in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1015.087348] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a5cd256e786c423bba9be617b682fddb [ 1015.579969] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.625s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1015.579969] env[62096]: ERROR nova.compute.manager [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 5a4fb4c4-62a4-43ef-ada0-710c6942bbbd, please check neutron logs for more information. 
[ 1015.579969] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] Traceback (most recent call last): [ 1015.579969] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1015.579969] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] self.driver.spawn(context, instance, image_meta, [ 1015.579969] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1015.579969] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1015.579969] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1015.579969] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] vm_ref = self.build_virtual_machine(instance, [ 1015.580551] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1015.580551] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] vif_infos = vmwarevif.get_vif_info(self._session, [ 1015.580551] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1015.580551] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] for vif in network_info: [ 1015.580551] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 1015.580551] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] return self._sync_wrapper(fn, *args, **kwargs) [ 1015.580551] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 1015.580551] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] self.wait() [ 1015.580551] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 1015.580551] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] self[:] = self._gt.wait() [ 1015.580551] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1015.580551] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] return self._exit_event.wait() [ 1015.580551] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 1015.580963] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] current.throw(*self._exc) [ 1015.580963] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", 
line 265, in main [ 1015.580963] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] result = function(*args, **kwargs) [ 1015.580963] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 1015.580963] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] return func(*args, **kwargs) [ 1015.580963] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 1015.580963] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] raise e [ 1015.580963] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1015.580963] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] nwinfo = self.network_api.allocate_for_instance( [ 1015.580963] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 1015.580963] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] created_port_ids = self._update_ports_for_instance( [ 1015.580963] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 1015.580963] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] with excutils.save_and_reraise_exception(): [ 1015.581381] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1015.581381] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] self.force_reraise() [ 1015.581381] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1015.581381] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] raise self.value [ 1015.581381] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 1015.581381] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] updated_port = self._update_port( [ 1015.581381] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1015.581381] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] _ensure_no_port_binding_failure(port) [ 1015.581381] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1015.581381] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] raise exception.PortBindingFailed(port_id=port['id']) [ 1015.581381] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] 
nova.exception.PortBindingFailed: Binding failed for port 5a4fb4c4-62a4-43ef-ada0-710c6942bbbd, please check neutron logs for more information. [ 1015.581381] env[62096]: ERROR nova.compute.manager [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] [ 1015.581763] env[62096]: DEBUG nova.compute.utils [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] Binding failed for port 5a4fb4c4-62a4-43ef-ada0-710c6942bbbd, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1015.581971] env[62096]: DEBUG nova.compute.manager [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] Build of instance 94a8faf3-cbd0-4a08-b93b-2c37bb040afe was re-scheduled: Binding failed for port 5a4fb4c4-62a4-43ef-ada0-710c6942bbbd, please check neutron logs for more information. {{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 1015.582377] env[62096]: DEBUG nova.compute.manager [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 1015.582634] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Acquiring lock "refresh_cache-94a8faf3-cbd0-4a08-b93b-2c37bb040afe" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1015.582783] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Acquired lock "refresh_cache-94a8faf3-cbd0-4a08-b93b-2c37bb040afe" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1015.582934] env[62096]: DEBUG nova.network.neutron [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1015.583329] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Expecting reply to msg 05dcb2577f9f4e889af2f9571f2482e1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1015.590974] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 05dcb2577f9f4e889af2f9571f2482e1 [ 1016.100903] env[62096]: DEBUG nova.network.neutron [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1016.175814] env[62096]: DEBUG nova.network.neutron [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1016.176552] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Expecting reply to msg 662177ad59524f86bc86c81c5d4bccd4 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1016.185253] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 662177ad59524f86bc86c81c5d4bccd4 [ 1016.679056] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Releasing lock "refresh_cache-94a8faf3-cbd0-4a08-b93b-2c37bb040afe" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1016.679341] env[62096]: DEBUG nova.compute.manager [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 1016.679484] env[62096]: DEBUG nova.compute.manager [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1016.679647] env[62096]: DEBUG nova.network.neutron [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1016.695642] env[62096]: DEBUG nova.network.neutron [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1016.696349] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Expecting reply to msg 5835d7ded5a2475ab46e938891ec6411 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1016.703357] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5835d7ded5a2475ab46e938891ec6411 [ 1017.198522] env[62096]: DEBUG nova.network.neutron [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1017.199058] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Expecting reply to msg cbef443b589a40c89623bd89d08626fc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1017.209627] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cbef443b589a40c89623bd89d08626fc [ 1017.564690] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Acquiring lock "b14755cc-d2c8-4985-ac38-936fe3b83a34" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1017.564935] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Lock "b14755cc-d2c8-4985-ac38-936fe3b83a34" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1017.565410] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg ca9e80f258a84e18b9f96730d1a78e39 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1017.574410] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ca9e80f258a84e18b9f96730d1a78e39 [ 1017.620973] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1017.701374] env[62096]: INFO nova.compute.manager [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] [instance: 94a8faf3-cbd0-4a08-b93b-2c37bb040afe] Took 1.02 seconds to deallocate network for instance. 
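Editor's note (not part of the log): the build above ends in nova.exception.PortBindingFailed for port 5a4fb4c4-62a4-43ef-ada0-710c6942bbbd, after which the compute manager re-schedules the instance and deallocates its empty network info. When tracing this kind of failure, the usual next step is to look at the port's binding fields on the Neutron side. The sketch below is a minimal, illustrative check using openstacksdk; the cloud name is an assumption, and the 'binding_failed' comparison value is inferred from the traceback's _ensure_no_port_binding_failure call rather than taken from this log.

    # Illustrative diagnostic sketch, assuming openstacksdk is configured via clouds.yaml.
    import openstack

    PORT_ID = "5a4fb4c4-62a4-43ef-ada0-710c6942bbbd"  # port from the traceback above

    conn = openstack.connect(cloud="devstack")  # cloud name is an assumption
    port = conn.network.get_port(PORT_ID)

    # Neutron reports a failed binding via binding:vif_type == 'binding_failed';
    # binding:host_id shows which compute host Neutron tried to bind against.
    print("vif_type  :", port.binding_vif_type)
    print("host_id   :", port.binding_host_id)
    print("vnic_type :", port.binding_vnic_type)
    if port.binding_vif_type == "binding_failed":
        print("Binding failed -- check the neutron server and agent logs for host",
              port.binding_host_id)

If the port has already been cleaned up (as happens after the deallocation logged here), the same fields can be checked on the replacement port created for the re-scheduled build.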
[ 1017.703104] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Expecting reply to msg e39ae9dd19a143938675ba6a280de852 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1017.737867] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e39ae9dd19a143938675ba6a280de852 [ 1017.809158] env[62096]: DEBUG oslo_concurrency.lockutils [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Acquiring lock "917b3988-d6e0-40cd-b106-f7e8b6be6c61" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1017.809392] env[62096]: DEBUG oslo_concurrency.lockutils [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Lock "917b3988-d6e0-40cd-b106-f7e8b6be6c61" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1017.809840] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg 8ec7d2a467414b4195aaab45e4d1c1d7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1017.817519] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8ec7d2a467414b4195aaab45e4d1c1d7 [ 1018.067430] env[62096]: DEBUG nova.compute.manager [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] Starting instance... 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1018.069466] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg 915d96d6723049219163c10f806b5e4c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1018.109447] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 915d96d6723049219163c10f806b5e4c [ 1018.127985] env[62096]: DEBUG oslo_concurrency.lockutils [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Acquiring lock "da512c60-0b6e-434c-a049-2806292f698a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1018.128256] env[62096]: DEBUG oslo_concurrency.lockutils [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Lock "da512c60-0b6e-434c-a049-2806292f698a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1018.128778] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg 44a209cae1d94ae7a5aa3648696a175c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1018.136722] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 44a209cae1d94ae7a5aa3648696a175c [ 1018.207211] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Expecting reply to msg 576463261d08439c8d00854b75ef1420 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1018.237578] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 576463261d08439c8d00854b75ef1420 [ 1018.311752] env[62096]: DEBUG nova.compute.manager [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] Starting instance... 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1018.313472] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg e5a7bde7f1954fcb91cb0beb08c3c130 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1018.341822] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e5a7bde7f1954fcb91cb0beb08c3c130 [ 1018.586257] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1018.586528] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1018.587912] env[62096]: INFO nova.compute.claims [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1018.589545] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg a1da4b23d3cb4070bc3e83f0c9866e7f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1018.620594] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1018.620769] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Starting heal instance info cache {{(pid=62096) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9926}} [ 1018.621331] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg e5d7c73bf0d648958fe60371b78d3cee in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1018.622904] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a1da4b23d3cb4070bc3e83f0c9866e7f [ 1018.630343] env[62096]: DEBUG nova.compute.manager [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: da512c60-0b6e-434c-a049-2806292f698a] Starting instance... 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1018.631883] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg c425b30e491043b3a3bba815c7dc3e71 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1018.646300] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e5d7c73bf0d648958fe60371b78d3cee [ 1018.658970] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c425b30e491043b3a3bba815c7dc3e71 [ 1018.727771] env[62096]: INFO nova.scheduler.client.report [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Deleted allocations for instance 94a8faf3-cbd0-4a08-b93b-2c37bb040afe [ 1018.733672] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Expecting reply to msg 4c19234c88d34ef8bb7865a9742dbc1e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1018.743247] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4c19234c88d34ef8bb7865a9742dbc1e [ 1018.829669] env[62096]: DEBUG oslo_concurrency.lockutils [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1019.093201] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg b9f17c3523204d32b51cada2bc8dad8c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1019.101015] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b9f17c3523204d32b51cada2bc8dad8c [ 1019.123174] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Didn't find any instances for network info cache update. 
{{(pid=62096) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10012}} [ 1019.147789] env[62096]: DEBUG oslo_concurrency.lockutils [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1019.235860] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ac57ab3e-a525-4f99-a9d7-b1c2a0201173 tempest-ServersNegativeTestJSON-1037308393 tempest-ServersNegativeTestJSON-1037308393-project-member] Lock "94a8faf3-cbd0-4a08-b93b-2c37bb040afe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.043s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1019.620610] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1019.622370] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1019.622565] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1019.650856] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41f5e529-db54-4a8c-aa3a-73b7d5c63011 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.659103] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-427c77c4-ab7e-4abb-bd75-42988f1767ec {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.689418] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00f4a6c5-8210-4837-a969-e24d10521612 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.696682] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c988313e-a59a-487c-b635-1127e7c7bf24 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.712720] env[62096]: DEBUG nova.compute.provider_tree [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1019.713246] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 
tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg 30300f38bfc6445ab3eb6a712dd9eeda in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1019.722122] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 30300f38bfc6445ab3eb6a712dd9eeda [ 1020.215982] env[62096]: DEBUG nova.scheduler.client.report [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1020.218686] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg 4a6abcacdc484ce1b1087c29122d8134 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1020.229858] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4a6abcacdc484ce1b1087c29122d8134 [ 1020.620026] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1020.721125] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.134s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1020.721461] env[62096]: DEBUG nova.compute.manager [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1020.724971] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg 01b910dd29bc45faad314d49d0373835 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1020.725952] env[62096]: DEBUG oslo_concurrency.lockutils [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.896s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1020.727239] env[62096]: INFO nova.compute.claims [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1020.728697] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg d169881a4d834050813f52444d84fc94 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1020.772059] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d169881a4d834050813f52444d84fc94 [ 1020.772059] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 01b910dd29bc45faad314d49d0373835 [ 1021.232044] env[62096]: DEBUG nova.compute.utils [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1021.232601] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg e05167536c174e01a5549256f5bb7880 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1021.234736] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg ff3b2382065f434a8e5f273c78cd8923 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1021.235813] env[62096]: DEBUG nova.compute.manager [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1021.235996] env[62096]: DEBUG nova.network.neutron [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1021.242233] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ff3b2382065f434a8e5f273c78cd8923 [ 1021.242705] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e05167536c174e01a5549256f5bb7880 [ 1021.281333] env[62096]: DEBUG nova.policy [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7550056849d1433abc1c6de13cb2b173', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '474f2f36984a4d799094ebec2b552049', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 1021.523149] env[62096]: DEBUG nova.network.neutron [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] Successfully created port: 9214896b-77d0-4e77-92ff-fc17784701e2 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1021.621147] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1021.621521] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg aa10c2dda7254890aaf4245e5e5f98e6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1021.630970] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aa10c2dda7254890aaf4245e5e5f98e6 [ 1021.736391] env[62096]: DEBUG nova.compute.manager [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1021.738063] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg 3a03da8892aa4e90b2204c3434439df7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1021.784397] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3a03da8892aa4e90b2204c3434439df7 [ 1021.799299] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aad0ef4-fa38-439d-9f5b-952f65032e8f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.807346] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03d06412-0bb6-4665-ac92-0ba8101c5eee {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.836264] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e629053-2019-4edf-b135-0322687bb86c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.842845] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7801ee96-9885-4553-be41-2a29e01c7831 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.855436] env[62096]: DEBUG nova.compute.provider_tree [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1021.855915] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg 76f00645f361419abfcf78a4a3a61cd2 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1021.863957] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 76f00645f361419abfcf78a4a3a61cd2 [ 1022.119517] env[62096]: DEBUG nova.compute.manager [req-ba7c9050-b762-4b9f-8bd4-0ee80beb553b req-e41e0865-9504-4edc-907e-bff75c54204a service nova] [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] Received event network-changed-9214896b-77d0-4e77-92ff-fc17784701e2 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 1022.119741] env[62096]: DEBUG nova.compute.manager [req-ba7c9050-b762-4b9f-8bd4-0ee80beb553b req-e41e0865-9504-4edc-907e-bff75c54204a service nova] [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] Refreshing instance network info cache due to event network-changed-9214896b-77d0-4e77-92ff-fc17784701e2. 
{{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 1022.119926] env[62096]: DEBUG oslo_concurrency.lockutils [req-ba7c9050-b762-4b9f-8bd4-0ee80beb553b req-e41e0865-9504-4edc-907e-bff75c54204a service nova] Acquiring lock "refresh_cache-b14755cc-d2c8-4985-ac38-936fe3b83a34" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1022.120090] env[62096]: DEBUG oslo_concurrency.lockutils [req-ba7c9050-b762-4b9f-8bd4-0ee80beb553b req-e41e0865-9504-4edc-907e-bff75c54204a service nova] Acquired lock "refresh_cache-b14755cc-d2c8-4985-ac38-936fe3b83a34" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1022.120476] env[62096]: DEBUG nova.network.neutron [req-ba7c9050-b762-4b9f-8bd4-0ee80beb553b req-e41e0865-9504-4edc-907e-bff75c54204a service nova] [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] Refreshing network info cache for port 9214896b-77d0-4e77-92ff-fc17784701e2 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1022.122598] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-ba7c9050-b762-4b9f-8bd4-0ee80beb553b req-e41e0865-9504-4edc-907e-bff75c54204a service nova] Expecting reply to msg 1ff42f8c7cd54a108a412c46e0e09a14 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1022.124474] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1022.128824] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1ff42f8c7cd54a108a412c46e0e09a14 [ 1022.245369] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg 0a859c9c06de4989bcaa820f8bd74604 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1022.277647] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0a859c9c06de4989bcaa820f8bd74604 [ 1022.290028] env[62096]: ERROR nova.compute.manager [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 9214896b-77d0-4e77-92ff-fc17784701e2, please check neutron logs for more information. 
[ 1022.290028] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 1022.290028] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1022.290028] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 1022.290028] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 1022.290028] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 1022.290028] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 1022.290028] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 1022.290028] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1022.290028] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 1022.290028] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1022.290028] env[62096]: ERROR nova.compute.manager raise self.value [ 1022.290028] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 1022.290028] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 1022.290028] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1022.290028] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 1022.290781] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1022.290781] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 1022.290781] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 9214896b-77d0-4e77-92ff-fc17784701e2, please check neutron logs for more information. 
[ 1022.290781] env[62096]: ERROR nova.compute.manager [ 1022.290781] env[62096]: Traceback (most recent call last): [ 1022.290781] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 1022.290781] env[62096]: listener.cb(fileno) [ 1022.290781] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1022.290781] env[62096]: result = function(*args, **kwargs) [ 1022.290781] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 1022.290781] env[62096]: return func(*args, **kwargs) [ 1022.290781] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 1022.290781] env[62096]: raise e [ 1022.290781] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1022.290781] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 1022.290781] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 1022.290781] env[62096]: created_port_ids = self._update_ports_for_instance( [ 1022.290781] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 1022.290781] env[62096]: with excutils.save_and_reraise_exception(): [ 1022.290781] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1022.290781] env[62096]: self.force_reraise() [ 1022.290781] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1022.290781] env[62096]: raise self.value [ 1022.290781] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 1022.290781] env[62096]: updated_port = self._update_port( [ 1022.290781] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1022.290781] env[62096]: _ensure_no_port_binding_failure(port) [ 1022.290781] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1022.290781] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 1022.292070] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 9214896b-77d0-4e77-92ff-fc17784701e2, please check neutron logs for more information. 
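Editor's note (not part of the log): alongside the port failures, this section repeatedly logs the provider inventory the compute claims are made against (VCPU total 48 with allocation_ratio 4.0, MEMORY_MB total 196590 with 512 reserved, DISK_GB total 400), e.g. at 1020.215982 above and 1022.358858 below. The capacity placement works with follows the usual (total - reserved) * allocation_ratio arithmetic, with per-instance requests further capped by max_unit (16 VCPU, 65530 MB, 127 GB here). The sketch below simply reproduces that arithmetic for the logged values; it is illustrative and not code from Nova or placement.

    # Illustrative sketch: effective capacity implied by the inventory logged in this section.
    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        # (total - reserved) * allocation_ratio is the amount that can be
        # allocated against a resource class in aggregate.
        capacity = int((inv["total"] - inv["reserved"]) * inv["allocation_ratio"])
        print(f"{rc}: capacity {capacity}")

    # Expected output for these values:
    #   VCPU: capacity 192
    #   MEMORY_MB: capacity 196078
    #   DISK_GB: capacity 400

This is why the m1.nano claims in this section (1 VCPU, 192 MB) succeed immediately even while the builds themselves fail later on port binding.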
[ 1022.292070] env[62096]: Removing descriptor: 14 [ 1022.358858] env[62096]: DEBUG nova.scheduler.client.report [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1022.361199] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg dd7ca476c21f4fa98c133ce1323aff0a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1022.372014] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dd7ca476c21f4fa98c133ce1323aff0a [ 1022.640985] env[62096]: DEBUG nova.network.neutron [req-ba7c9050-b762-4b9f-8bd4-0ee80beb553b req-e41e0865-9504-4edc-907e-bff75c54204a service nova] [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1022.710127] env[62096]: DEBUG nova.network.neutron [req-ba7c9050-b762-4b9f-8bd4-0ee80beb553b req-e41e0865-9504-4edc-907e-bff75c54204a service nova] [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1022.710661] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-ba7c9050-b762-4b9f-8bd4-0ee80beb553b req-e41e0865-9504-4edc-907e-bff75c54204a service nova] Expecting reply to msg 1a8a04d7e7b04093a6ef07e78fa637f7 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1022.739694] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1a8a04d7e7b04093a6ef07e78fa637f7 [ 1022.748665] env[62096]: DEBUG nova.compute.manager [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1022.772722] env[62096]: DEBUG nova.virt.hardware [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1022.772981] env[62096]: DEBUG nova.virt.hardware [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1022.773140] env[62096]: DEBUG nova.virt.hardware [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1022.773323] env[62096]: DEBUG nova.virt.hardware [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1022.773468] env[62096]: DEBUG nova.virt.hardware [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1022.773612] env[62096]: DEBUG nova.virt.hardware [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1022.773811] env[62096]: DEBUG nova.virt.hardware [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1022.773965] env[62096]: DEBUG nova.virt.hardware [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1022.774157] 
env[62096]: DEBUG nova.virt.hardware [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1022.774326] env[62096]: DEBUG nova.virt.hardware [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1022.774494] env[62096]: DEBUG nova.virt.hardware [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1022.775361] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66549fa7-5dc0-4e90-accd-70a9be1a447c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.783730] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f9bbe13-431a-4385-9b9d-d0ca3427fef3 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.796578] env[62096]: ERROR nova.compute.manager [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 9214896b-77d0-4e77-92ff-fc17784701e2, please check neutron logs for more information. 
[ 1022.796578] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] Traceback (most recent call last): [ 1022.796578] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 1022.796578] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] yield resources [ 1022.796578] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1022.796578] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] self.driver.spawn(context, instance, image_meta, [ 1022.796578] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1022.796578] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1022.796578] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1022.796578] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] vm_ref = self.build_virtual_machine(instance, [ 1022.796578] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1022.797218] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] vif_infos = vmwarevif.get_vif_info(self._session, [ 1022.797218] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1022.797218] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] for vif in network_info: [ 1022.797218] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 1022.797218] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] return self._sync_wrapper(fn, *args, **kwargs) [ 1022.797218] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 1022.797218] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] self.wait() [ 1022.797218] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 1022.797218] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] self[:] = self._gt.wait() [ 1022.797218] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1022.797218] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] return self._exit_event.wait() [ 1022.797218] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 
1022.797218] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] current.throw(*self._exc) [ 1022.797840] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1022.797840] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] result = function(*args, **kwargs) [ 1022.797840] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 1022.797840] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] return func(*args, **kwargs) [ 1022.797840] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 1022.797840] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] raise e [ 1022.797840] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1022.797840] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] nwinfo = self.network_api.allocate_for_instance( [ 1022.797840] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 1022.797840] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] created_port_ids = self._update_ports_for_instance( [ 1022.797840] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 1022.797840] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] with excutils.save_and_reraise_exception(): [ 1022.797840] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1022.798447] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] self.force_reraise() [ 1022.798447] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1022.798447] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] raise self.value [ 1022.798447] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 1022.798447] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] updated_port = self._update_port( [ 1022.798447] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1022.798447] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] _ensure_no_port_binding_failure(port) [ 1022.798447] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] File "/opt/stack/nova/nova/network/neutron.py", 
line 294, in _ensure_no_port_binding_failure [ 1022.798447] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] raise exception.PortBindingFailed(port_id=port['id']) [ 1022.798447] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] nova.exception.PortBindingFailed: Binding failed for port 9214896b-77d0-4e77-92ff-fc17784701e2, please check neutron logs for more information. [ 1022.798447] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] [ 1022.798447] env[62096]: INFO nova.compute.manager [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] Terminating instance [ 1022.799032] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Acquiring lock "refresh_cache-b14755cc-d2c8-4985-ac38-936fe3b83a34" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1022.864138] env[62096]: DEBUG oslo_concurrency.lockutils [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.138s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1022.864711] env[62096]: DEBUG nova.compute.manager [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1022.866386] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg b0bde94ca518414cb98d168f8f61d487 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1022.867416] env[62096]: DEBUG oslo_concurrency.lockutils [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.720s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1022.868820] env[62096]: INFO nova.compute.claims [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: da512c60-0b6e-434c-a049-2806292f698a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1022.870259] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg 607a328663064df3bb2a06412236e8ed in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1022.899482] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b0bde94ca518414cb98d168f8f61d487 [ 1022.905858] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 607a328663064df3bb2a06412236e8ed [ 1023.213404] env[62096]: DEBUG oslo_concurrency.lockutils [req-ba7c9050-b762-4b9f-8bd4-0ee80beb553b req-e41e0865-9504-4edc-907e-bff75c54204a service nova] Releasing lock "refresh_cache-b14755cc-d2c8-4985-ac38-936fe3b83a34" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1023.213932] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Acquired lock "refresh_cache-b14755cc-d2c8-4985-ac38-936fe3b83a34" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1023.214117] env[62096]: DEBUG nova.network.neutron [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1023.214487] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg ed648d5886094fd1a69811b2027a89a8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1023.221191] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ed648d5886094fd1a69811b2027a89a8 [ 1023.373576] env[62096]: DEBUG nova.compute.utils [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Using /dev/sd instead of None {{(pid=62096) 
get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1023.374267] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg 75b8ca0cc9af4733a5ec7929319633df in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1023.376286] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg 61b5d1bb460d4fb69ef3a59ea22bb6b6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1023.377200] env[62096]: DEBUG nova.compute.manager [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] Allocating IP information in the background. {{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1023.377371] env[62096]: DEBUG nova.network.neutron [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1023.383062] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 61b5d1bb460d4fb69ef3a59ea22bb6b6 [ 1023.384574] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 75b8ca0cc9af4733a5ec7929319633df [ 1023.420586] env[62096]: DEBUG nova.policy [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7550056849d1433abc1c6de13cb2b173', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '474f2f36984a4d799094ebec2b552049', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 1023.656615] env[62096]: DEBUG nova.network.neutron [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] Successfully created port: ee44dade-05aa-416d-a802-0501161cc1dd {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1023.732293] env[62096]: DEBUG nova.network.neutron [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1023.810434] env[62096]: DEBUG nova.network.neutron [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1023.810955] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg e8b41d7793ad4b96a39fd913c99fe62e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1023.819279] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e8b41d7793ad4b96a39fd913c99fe62e [ 1023.878253] env[62096]: DEBUG nova.compute.manager [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] Start building block device mappings for instance. {{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1023.879935] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg a8dedaca57604cd2afc1d8b1845b41e9 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1023.915826] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a8dedaca57604cd2afc1d8b1845b41e9 [ 1023.935999] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14db8197-3b99-43d0-a1e4-f0bffae4e228 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.943256] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f00f190-133f-451b-a4a9-b76d7ce1d971 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.972855] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8864834-e7b3-4564-8449-a202c4932896 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.981585] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab377da3-f3a8-4cf0-bada-423aac97f3fd {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.999417] env[62096]: DEBUG nova.compute.provider_tree [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1023.999912] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg 13e0f62f3e594ce5b54a63f2c0bf77a6 in queue 
reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1024.006900] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 13e0f62f3e594ce5b54a63f2c0bf77a6 [ 1024.145605] env[62096]: DEBUG nova.compute.manager [req-f684d6f5-8916-42a6-bc0f-c00ffd32d6b2 req-0b03c62e-ccfc-4db4-8614-456cd1b35e98 service nova] [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] Received event network-vif-deleted-9214896b-77d0-4e77-92ff-fc17784701e2 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 1024.313264] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Releasing lock "refresh_cache-b14755cc-d2c8-4985-ac38-936fe3b83a34" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1024.313702] env[62096]: DEBUG nova.compute.manager [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1024.313885] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1024.314315] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c62eed03-2a4b-4645-b4f9-b8d994b02cd0 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.323185] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0302759f-fac5-423f-9b0a-75c88688177e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.342758] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b14755cc-d2c8-4985-ac38-936fe3b83a34 could not be found. [ 1024.342952] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1024.343124] env[62096]: INFO nova.compute.manager [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] Took 0.03 seconds to destroy the instance on the hypervisor. 
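The Acquiring / Acquired / Releasing lock "refresh_cache-<uuid>" records above come from oslo_concurrency.lockutils, which Nova wraps around each per-instance network-cache refresh. A minimal sketch of that pattern, assuming a hypothetical refresh_instance_cache() helper and a caller-supplied rebuild function (neither is Nova's actual code), follows; the real oslo_concurrency.lockutils.lock() context manager is what emits the DEBUG acquire/release lines seen in this log.

from oslo_concurrency import lockutils

def refresh_instance_cache(instance_uuid, rebuild_fn):
    # One named lock per instance, mirroring the 'refresh_cache-<uuid>'
    # lock names in the log; lockutils logs the acquire/release itself
    # when the context manager is entered and exited.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        # Stand-in for the cache rebuild the log attributes to
        # _get_instance_nw_info(); an exception here still releases the lock.
        return rebuild_fn(instance_uuid)

# Example usage with a dummy rebuild function that returns an empty
# network_info list, like the "Updating instance_info_cache with
# network_info: []" records above.
if __name__ == '__main__':
    print(refresh_instance_cache('b14755cc-d2c8-4985-ac38-936fe3b83a34',
                                 lambda uuid: []))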
[ 1024.343357] env[62096]: DEBUG oslo.service.loopingcall [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1024.343561] env[62096]: DEBUG nova.compute.manager [-] [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1024.343649] env[62096]: DEBUG nova.network.neutron [-] [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1024.357742] env[62096]: DEBUG nova.network.neutron [-] [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1024.358223] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 4e3a0d4347c84fed8ec4cb1e95ad1015 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1024.365933] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4e3a0d4347c84fed8ec4cb1e95ad1015 [ 1024.384786] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg e0eaec3010ea40ee93bdfdec381c9b6c in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1024.418687] env[62096]: ERROR nova.compute.manager [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ee44dade-05aa-416d-a802-0501161cc1dd, please check neutron logs for more information. 
[ 1024.418687] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 1024.418687] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1024.418687] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 1024.418687] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 1024.418687] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 1024.418687] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 1024.418687] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 1024.418687] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1024.418687] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 1024.418687] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1024.418687] env[62096]: ERROR nova.compute.manager raise self.value [ 1024.418687] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 1024.418687] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 1024.418687] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1024.418687] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 1024.419269] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1024.419269] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 1024.419269] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port ee44dade-05aa-416d-a802-0501161cc1dd, please check neutron logs for more information. 
[ 1024.419269] env[62096]: ERROR nova.compute.manager [ 1024.419269] env[62096]: Traceback (most recent call last): [ 1024.419269] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 1024.419269] env[62096]: listener.cb(fileno) [ 1024.419269] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1024.419269] env[62096]: result = function(*args, **kwargs) [ 1024.419269] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 1024.419269] env[62096]: return func(*args, **kwargs) [ 1024.419269] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 1024.419269] env[62096]: raise e [ 1024.419269] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1024.419269] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 1024.419269] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 1024.419269] env[62096]: created_port_ids = self._update_ports_for_instance( [ 1024.419269] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 1024.419269] env[62096]: with excutils.save_and_reraise_exception(): [ 1024.419269] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1024.419269] env[62096]: self.force_reraise() [ 1024.419269] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1024.419269] env[62096]: raise self.value [ 1024.419269] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 1024.419269] env[62096]: updated_port = self._update_port( [ 1024.419269] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1024.419269] env[62096]: _ensure_no_port_binding_failure(port) [ 1024.419269] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1024.419269] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 1024.420341] env[62096]: nova.exception.PortBindingFailed: Binding failed for port ee44dade-05aa-416d-a802-0501161cc1dd, please check neutron logs for more information. 
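Both tracebacks above funnel through the same two building blocks: oslo_utils.excutils.save_and_reraise_exception(), whose __exit__ / force_reraise frames ("raise self.value") appear in the trace, and a port-binding check that raises PortBindingFailed. A self-contained sketch of that pattern, with simplified stand-ins for Nova's _update_port and _update_ports_for_instance (not the real implementations), looks like this:

from oslo_utils import excutils

class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed."""

def _update_port(port_id, binding_ok):
    # Mirrors the role of _ensure_no_port_binding_failure(): a port whose
    # binding failed becomes an exception instead of a silently broken VIF.
    if not binding_ok:
        raise PortBindingFailed('Binding failed for port %s, please check '
                                'neutron logs for more information.' % port_id)
    return port_id

def _update_ports_for_instance(port_ids):
    created = []
    for port_id in port_ids:
        try:
            created.append(_update_port(port_id, binding_ok=False))
        except Exception:
            # Canonical oslo pattern from the traceback: run cleanup while
            # the original exception is saved, then let __exit__ re-raise it
            # (the "raise self.value" frame inside force_reraise).
            with excutils.save_and_reraise_exception():
                created.clear()   # roll back partially-created ports

Calling _update_ports_for_instance() with any port list re-raises PortBindingFailed after the cleanup block runs, which is the shape of the failures logged above for ports 9214896b-77d0-4e77-92ff-fc17784701e2 and ee44dade-05aa-416d-a802-0501161cc1dd.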
[ 1024.420341] env[62096]: Removing descriptor: 14 [ 1024.423235] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e0eaec3010ea40ee93bdfdec381c9b6c [ 1024.502974] env[62096]: DEBUG nova.scheduler.client.report [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1024.505937] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg 98dc7fbd98cd49e3b3765024ab7e7900 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1024.518617] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 98dc7fbd98cd49e3b3765024ab7e7900 [ 1024.860293] env[62096]: DEBUG nova.network.neutron [-] [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1024.860779] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg a83903a73c0a496c8338f06bf07fa92e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1024.869441] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a83903a73c0a496c8338f06bf07fa92e [ 1024.886886] env[62096]: DEBUG nova.compute.manager [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] Start spawning the instance on the hypervisor. 
{{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1024.912626] env[62096]: DEBUG nova.virt.hardware [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1024.912862] env[62096]: DEBUG nova.virt.hardware [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1024.913019] env[62096]: DEBUG nova.virt.hardware [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1024.913198] env[62096]: DEBUG nova.virt.hardware [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1024.913343] env[62096]: DEBUG nova.virt.hardware [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1024.913488] env[62096]: DEBUG nova.virt.hardware [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1024.913688] env[62096]: DEBUG nova.virt.hardware [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1024.913843] env[62096]: DEBUG nova.virt.hardware [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1024.914006] 
env[62096]: DEBUG nova.virt.hardware [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1024.914246] env[62096]: DEBUG nova.virt.hardware [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1024.914420] env[62096]: DEBUG nova.virt.hardware [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1024.915293] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db748660-0aff-4a39-b99c-5defa19c73a1 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.923025] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cb921fa-bf98-4cd7-9cb9-72145c71de22 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.936486] env[62096]: ERROR nova.compute.manager [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ee44dade-05aa-416d-a802-0501161cc1dd, please check neutron logs for more information. 
[ 1024.936486] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] Traceback (most recent call last): [ 1024.936486] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 1024.936486] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] yield resources [ 1024.936486] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1024.936486] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] self.driver.spawn(context, instance, image_meta, [ 1024.936486] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1024.936486] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1024.936486] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1024.936486] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] vm_ref = self.build_virtual_machine(instance, [ 1024.936486] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1024.936949] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] vif_infos = vmwarevif.get_vif_info(self._session, [ 1024.936949] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1024.936949] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] for vif in network_info: [ 1024.936949] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 1024.936949] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] return self._sync_wrapper(fn, *args, **kwargs) [ 1024.936949] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 1024.936949] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] self.wait() [ 1024.936949] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 1024.936949] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] self[:] = self._gt.wait() [ 1024.936949] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1024.936949] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] return self._exit_event.wait() [ 1024.936949] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 
1024.936949] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] current.throw(*self._exc) [ 1024.937404] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1024.937404] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] result = function(*args, **kwargs) [ 1024.937404] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 1024.937404] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] return func(*args, **kwargs) [ 1024.937404] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 1024.937404] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] raise e [ 1024.937404] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1024.937404] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] nwinfo = self.network_api.allocate_for_instance( [ 1024.937404] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 1024.937404] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] created_port_ids = self._update_ports_for_instance( [ 1024.937404] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 1024.937404] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] with excutils.save_and_reraise_exception(): [ 1024.937404] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1024.937831] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] self.force_reraise() [ 1024.937831] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1024.937831] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] raise self.value [ 1024.937831] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 1024.937831] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] updated_port = self._update_port( [ 1024.937831] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1024.937831] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] _ensure_no_port_binding_failure(port) [ 1024.937831] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] File "/opt/stack/nova/nova/network/neutron.py", 
line 294, in _ensure_no_port_binding_failure [ 1024.937831] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] raise exception.PortBindingFailed(port_id=port['id']) [ 1024.937831] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] nova.exception.PortBindingFailed: Binding failed for port ee44dade-05aa-416d-a802-0501161cc1dd, please check neutron logs for more information. [ 1024.937831] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] [ 1024.937831] env[62096]: INFO nova.compute.manager [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] Terminating instance [ 1024.938756] env[62096]: DEBUG oslo_concurrency.lockutils [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Acquiring lock "refresh_cache-917b3988-d6e0-40cd-b106-f7e8b6be6c61" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1024.938805] env[62096]: DEBUG oslo_concurrency.lockutils [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Acquired lock "refresh_cache-917b3988-d6e0-40cd-b106-f7e8b6be6c61" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.938967] env[62096]: DEBUG nova.network.neutron [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1024.939361] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg 51affc51cb4942d6819a79c4ee83cc9b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1024.947803] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 51affc51cb4942d6819a79c4ee83cc9b [ 1025.008623] env[62096]: DEBUG oslo_concurrency.lockutils [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.141s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1025.009319] env[62096]: DEBUG nova.compute.manager [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: da512c60-0b6e-434c-a049-2806292f698a] Start building networks asynchronously for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1025.010966] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg 55b528ebaa7840c89382cde4082b723f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1025.012875] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 2.888s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1025.013517] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1025.013735] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62096) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1025.014885] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dc5212c-3c7a-41c6-afb4-39ddb1676137 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.022928] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8b91f23-3d1a-41f1-88aa-80362847fffc {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.039487] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbe33058-2ca5-43aa-b029-2c68b13a789e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.046740] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20106fcf-f98f-4fe4-97f5-f28055c40d45 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.075215] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181782MB free_disk=127GB free_vcpus=48 pci_devices=None {{(pid=62096) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1025.075385] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.075591] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62096) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1025.076453] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg ae8db067cda34ecba88213e602f63e6b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1025.077584] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 55b528ebaa7840c89382cde4082b723f [ 1025.094018] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ae8db067cda34ecba88213e602f63e6b [ 1025.383288] env[62096]: INFO nova.compute.manager [-] [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] Took 1.02 seconds to deallocate network for instance. [ 1025.383288] env[62096]: DEBUG nova.compute.claims [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1025.383288] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.469281] env[62096]: DEBUG nova.network.neutron [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1025.514724] env[62096]: DEBUG nova.compute.utils [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Using /dev/sd instead of None {{(pid=62096) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1025.515565] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg cebc7803010c4efd870a32f1aa7abced in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1025.516607] env[62096]: DEBUG nova.compute.manager [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: da512c60-0b6e-434c-a049-2806292f698a] Allocating IP information in the background. 
{{(pid=62096) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1025.516782] env[62096]: DEBUG nova.network.neutron [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: da512c60-0b6e-434c-a049-2806292f698a] allocate_for_instance() {{(pid=62096) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1025.526103] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cebc7803010c4efd870a32f1aa7abced [ 1025.558561] env[62096]: DEBUG nova.policy [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7550056849d1433abc1c6de13cb2b173', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '474f2f36984a4d799094ebec2b552049', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62096) authorize /opt/stack/nova/nova/policy.py:203}} [ 1025.580669] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg f866a65f49b5465e811f3512190293f8 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1025.582126] env[62096]: DEBUG nova.network.neutron [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1025.582553] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg db70950984be42ad9bbb0a33d8fadbaa in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1025.591118] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f866a65f49b5465e811f3512190293f8 [ 1025.591621] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg db70950984be42ad9bbb0a33d8fadbaa [ 1025.813160] env[62096]: DEBUG nova.network.neutron [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: da512c60-0b6e-434c-a049-2806292f698a] Successfully created port: 3c85207e-896f-41e7-a6d8-8a1ccd6962e3 {{(pid=62096) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1026.019986] env[62096]: DEBUG nova.compute.manager [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: da512c60-0b6e-434c-a049-2806292f698a] Start building block device mappings for instance. 
{{(pid=62096) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1026.021730] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg a2124b2c50364871a12162a15458c685 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1026.053906] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a2124b2c50364871a12162a15458c685 [ 1026.086548] env[62096]: DEBUG oslo_concurrency.lockutils [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Releasing lock "refresh_cache-917b3988-d6e0-40cd-b106-f7e8b6be6c61" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1026.086919] env[62096]: DEBUG nova.compute.manager [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] Start destroying the instance on the hypervisor. {{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1026.087095] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1026.087774] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-22b66ba3-9a97-4b58-9729-fd5beee9cfc3 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.096696] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-734f9ee4-0823-4549-8e07-5ebac04375da {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.107806] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance b14755cc-d2c8-4985-ac38-936fe3b83a34 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1026.107941] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance 917b3988-d6e0-40cd-b106-f7e8b6be6c61 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1026.108080] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Instance da512c60-0b6e-434c-a049-2806292f698a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=62096) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1026.108258] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=62096) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1026.108393] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1152MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=62096) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1026.121546] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 917b3988-d6e0-40cd-b106-f7e8b6be6c61 could not be found. [ 1026.121747] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1026.121916] env[62096]: INFO nova.compute.manager [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] Took 0.03 seconds to destroy the instance on the hypervisor. [ 1026.122212] env[62096]: DEBUG oslo.service.loopingcall [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1026.122718] env[62096]: DEBUG nova.compute.manager [-] [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1026.122718] env[62096]: DEBUG nova.network.neutron [-] [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1026.138771] env[62096]: DEBUG nova.network.neutron [-] [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1026.139083] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 4b5d80c5ff844384bc2ecb1f1e2efb2a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1026.145711] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4b5d80c5ff844384bc2ecb1f1e2efb2a [ 1026.161540] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14ee8821-44c0-48e6-8755-15081157c993 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.168339] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc2ff152-a67a-4d74-a23c-bccbeb227d6c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.197056] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fbabc64-e4bb-4d6e-9547-4ea2ff9df157 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.203901] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65774648-e202-4e16-8783-61e77c161f1f {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.217895] env[62096]: DEBUG nova.compute.provider_tree [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1026.218354] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg 4d50358f8a7a46ec837b29b74599f85a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1026.225375] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4d50358f8a7a46ec837b29b74599f85a [ 1026.394930] env[62096]: DEBUG nova.compute.manager [req-0f32f300-526d-4ac2-b2fd-35007b8a01d5 req-3ed336ab-d415-4115-b68c-a6db139d1b78 service nova] [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] Received event network-changed-ee44dade-05aa-416d-a802-0501161cc1dd {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 1026.395267] env[62096]: DEBUG nova.compute.manager [req-0f32f300-526d-4ac2-b2fd-35007b8a01d5 req-3ed336ab-d415-4115-b68c-a6db139d1b78 service nova] [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] Refreshing instance network info cache due to event network-changed-ee44dade-05aa-416d-a802-0501161cc1dd. 
{{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 1026.395455] env[62096]: DEBUG oslo_concurrency.lockutils [req-0f32f300-526d-4ac2-b2fd-35007b8a01d5 req-3ed336ab-d415-4115-b68c-a6db139d1b78 service nova] Acquiring lock "refresh_cache-917b3988-d6e0-40cd-b106-f7e8b6be6c61" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1026.396032] env[62096]: DEBUG oslo_concurrency.lockutils [req-0f32f300-526d-4ac2-b2fd-35007b8a01d5 req-3ed336ab-d415-4115-b68c-a6db139d1b78 service nova] Acquired lock "refresh_cache-917b3988-d6e0-40cd-b106-f7e8b6be6c61" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.396168] env[62096]: DEBUG nova.network.neutron [req-0f32f300-526d-4ac2-b2fd-35007b8a01d5 req-3ed336ab-d415-4115-b68c-a6db139d1b78 service nova] [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] Refreshing network info cache for port ee44dade-05aa-416d-a802-0501161cc1dd {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1026.396605] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-0f32f300-526d-4ac2-b2fd-35007b8a01d5 req-3ed336ab-d415-4115-b68c-a6db139d1b78 service nova] Expecting reply to msg 147c8c2c0b3f4c25a2e9e26e342f1fe1 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1026.403161] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 147c8c2c0b3f4c25a2e9e26e342f1fe1 [ 1026.527101] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg 24d29fad32834e46b3c94f75fc2a1c97 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1026.560022] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 24d29fad32834e46b3c94f75fc2a1c97 [ 1026.640934] env[62096]: DEBUG nova.network.neutron [-] [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1026.641384] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg a9545c23c40e463c8d571e67d4f7b99d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1026.644197] env[62096]: DEBUG nova.compute.manager [req-33e96ae9-4ff0-4f67-b11b-102a7cc68320 req-d623d6ad-0c0c-4b0d-b0a8-42db825220a5 service nova] [instance: da512c60-0b6e-434c-a049-2806292f698a] Received event network-changed-3c85207e-896f-41e7-a6d8-8a1ccd6962e3 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 1026.644395] env[62096]: DEBUG nova.compute.manager [req-33e96ae9-4ff0-4f67-b11b-102a7cc68320 req-d623d6ad-0c0c-4b0d-b0a8-42db825220a5 service nova] [instance: da512c60-0b6e-434c-a049-2806292f698a] Refreshing instance network info cache due to event network-changed-3c85207e-896f-41e7-a6d8-8a1ccd6962e3. 
{{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 1026.644603] env[62096]: DEBUG oslo_concurrency.lockutils [req-33e96ae9-4ff0-4f67-b11b-102a7cc68320 req-d623d6ad-0c0c-4b0d-b0a8-42db825220a5 service nova] Acquiring lock "refresh_cache-da512c60-0b6e-434c-a049-2806292f698a" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1026.644744] env[62096]: DEBUG oslo_concurrency.lockutils [req-33e96ae9-4ff0-4f67-b11b-102a7cc68320 req-d623d6ad-0c0c-4b0d-b0a8-42db825220a5 service nova] Acquired lock "refresh_cache-da512c60-0b6e-434c-a049-2806292f698a" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.644954] env[62096]: DEBUG nova.network.neutron [req-33e96ae9-4ff0-4f67-b11b-102a7cc68320 req-d623d6ad-0c0c-4b0d-b0a8-42db825220a5 service nova] [instance: da512c60-0b6e-434c-a049-2806292f698a] Refreshing network info cache for port 3c85207e-896f-41e7-a6d8-8a1ccd6962e3 {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1026.645294] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-33e96ae9-4ff0-4f67-b11b-102a7cc68320 req-d623d6ad-0c0c-4b0d-b0a8-42db825220a5 service nova] Expecting reply to msg d20761eeac7748459c57086b461e43aa in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1026.649119] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a9545c23c40e463c8d571e67d4f7b99d [ 1026.651026] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d20761eeac7748459c57086b461e43aa [ 1026.721164] env[62096]: DEBUG nova.scheduler.client.report [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1026.723506] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Expecting reply to msg c7ee790813d24faeacde8f4c61ef1e68 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1026.737180] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c7ee790813d24faeacde8f4c61ef1e68 [ 1026.793745] env[62096]: ERROR nova.compute.manager [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 3c85207e-896f-41e7-a6d8-8a1ccd6962e3, please check neutron logs for more information. 
[ 1026.793745] env[62096]: ERROR nova.compute.manager Traceback (most recent call last): [ 1026.793745] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1026.793745] env[62096]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 1026.793745] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 1026.793745] env[62096]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 1026.793745] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 1026.793745] env[62096]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 1026.793745] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1026.793745] env[62096]: ERROR nova.compute.manager self.force_reraise() [ 1026.793745] env[62096]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1026.793745] env[62096]: ERROR nova.compute.manager raise self.value [ 1026.793745] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 1026.793745] env[62096]: ERROR nova.compute.manager updated_port = self._update_port( [ 1026.793745] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1026.793745] env[62096]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 1026.794111] env[62096]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1026.794111] env[62096]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 1026.794111] env[62096]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 3c85207e-896f-41e7-a6d8-8a1ccd6962e3, please check neutron logs for more information. 
[ 1026.794111] env[62096]: ERROR nova.compute.manager [ 1026.794111] env[62096]: Traceback (most recent call last): [ 1026.794111] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 1026.794111] env[62096]: listener.cb(fileno) [ 1026.794111] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1026.794111] env[62096]: result = function(*args, **kwargs) [ 1026.794111] env[62096]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 1026.794111] env[62096]: return func(*args, **kwargs) [ 1026.794111] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 1026.794111] env[62096]: raise e [ 1026.794111] env[62096]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1026.794111] env[62096]: nwinfo = self.network_api.allocate_for_instance( [ 1026.794111] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 1026.794111] env[62096]: created_port_ids = self._update_ports_for_instance( [ 1026.794111] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 1026.794111] env[62096]: with excutils.save_and_reraise_exception(): [ 1026.794111] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1026.794111] env[62096]: self.force_reraise() [ 1026.794111] env[62096]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1026.794111] env[62096]: raise self.value [ 1026.794111] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 1026.794111] env[62096]: updated_port = self._update_port( [ 1026.794111] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1026.794111] env[62096]: _ensure_no_port_binding_failure(port) [ 1026.794111] env[62096]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1026.794111] env[62096]: raise exception.PortBindingFailed(port_id=port['id']) [ 1026.794669] env[62096]: nova.exception.PortBindingFailed: Binding failed for port 3c85207e-896f-41e7-a6d8-8a1ccd6962e3, please check neutron logs for more information. [ 1026.794669] env[62096]: Removing descriptor: 16 [ 1026.913274] env[62096]: DEBUG nova.network.neutron [req-0f32f300-526d-4ac2-b2fd-35007b8a01d5 req-3ed336ab-d415-4115-b68c-a6db139d1b78 service nova] [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1026.979220] env[62096]: DEBUG nova.network.neutron [req-0f32f300-526d-4ac2-b2fd-35007b8a01d5 req-3ed336ab-d415-4115-b68c-a6db139d1b78 service nova] [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1026.979743] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-0f32f300-526d-4ac2-b2fd-35007b8a01d5 req-3ed336ab-d415-4115-b68c-a6db139d1b78 service nova] Expecting reply to msg fd722f06bea4403b8aa6e2cab623efc5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1026.988519] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fd722f06bea4403b8aa6e2cab623efc5 [ 1027.031924] env[62096]: DEBUG nova.compute.manager [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: da512c60-0b6e-434c-a049-2806292f698a] Start spawning the instance on the hypervisor. {{(pid=62096) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1027.061042] env[62096]: DEBUG nova.virt.hardware [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-19T11:52:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-19T11:51:55Z,direct_url=,disk_format='vmdk',id=fb764baa-3805-45c4-a694-aa91b0932110,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bd76085d004b408bbcf1ebf76ba2593e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-19T11:51:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1027.061280] env[62096]: DEBUG nova.virt.hardware [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Flavor limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1027.061435] env[62096]: DEBUG nova.virt.hardware [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Image limits 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1027.061655] env[62096]: DEBUG nova.virt.hardware [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Flavor pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1027.061752] env[62096]: DEBUG nova.virt.hardware [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Image pref 0:0:0 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
1027.061896] env[62096]: DEBUG nova.virt.hardware [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62096) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1027.062125] env[62096]: DEBUG nova.virt.hardware [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1027.062306] env[62096]: DEBUG nova.virt.hardware [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1027.062472] env[62096]: DEBUG nova.virt.hardware [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Got 1 possible topologies {{(pid=62096) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1027.062630] env[62096]: DEBUG nova.virt.hardware [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1027.062796] env[62096]: DEBUG nova.virt.hardware [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62096) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1027.063646] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4215041d-e796-422f-90dc-4ff5e2e4548d {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.071375] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e39ef9c6-10cc-4ee4-83ed-4cd7c11693f6 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.084732] env[62096]: ERROR nova.compute.manager [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: da512c60-0b6e-434c-a049-2806292f698a] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 3c85207e-896f-41e7-a6d8-8a1ccd6962e3, please check neutron logs for more information. 
[ 1027.084732] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] Traceback (most recent call last): [ 1027.084732] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 1027.084732] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] yield resources [ 1027.084732] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1027.084732] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] self.driver.spawn(context, instance, image_meta, [ 1027.084732] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1027.084732] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1027.084732] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1027.084732] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] vm_ref = self.build_virtual_machine(instance, [ 1027.084732] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1027.085044] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] vif_infos = vmwarevif.get_vif_info(self._session, [ 1027.085044] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1027.085044] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] for vif in network_info: [ 1027.085044] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 1027.085044] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] return self._sync_wrapper(fn, *args, **kwargs) [ 1027.085044] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 1027.085044] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] self.wait() [ 1027.085044] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 1027.085044] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] self[:] = self._gt.wait() [ 1027.085044] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1027.085044] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] return self._exit_event.wait() [ 1027.085044] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 
1027.085044] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] current.throw(*self._exc) [ 1027.085303] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1027.085303] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] result = function(*args, **kwargs) [ 1027.085303] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 1027.085303] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] return func(*args, **kwargs) [ 1027.085303] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 1027.085303] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] raise e [ 1027.085303] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1027.085303] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] nwinfo = self.network_api.allocate_for_instance( [ 1027.085303] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 1027.085303] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] created_port_ids = self._update_ports_for_instance( [ 1027.085303] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 1027.085303] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] with excutils.save_and_reraise_exception(): [ 1027.085303] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1027.085552] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] self.force_reraise() [ 1027.085552] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1027.085552] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] raise self.value [ 1027.085552] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 1027.085552] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] updated_port = self._update_port( [ 1027.085552] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1027.085552] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] _ensure_no_port_binding_failure(port) [ 1027.085552] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] File "/opt/stack/nova/nova/network/neutron.py", 
line 294, in _ensure_no_port_binding_failure [ 1027.085552] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] raise exception.PortBindingFailed(port_id=port['id']) [ 1027.085552] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] nova.exception.PortBindingFailed: Binding failed for port 3c85207e-896f-41e7-a6d8-8a1ccd6962e3, please check neutron logs for more information. [ 1027.085552] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] [ 1027.085552] env[62096]: INFO nova.compute.manager [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: da512c60-0b6e-434c-a049-2806292f698a] Terminating instance [ 1027.087040] env[62096]: DEBUG oslo_concurrency.lockutils [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Acquiring lock "refresh_cache-da512c60-0b6e-434c-a049-2806292f698a" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1027.147095] env[62096]: INFO nova.compute.manager [-] [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] Took 1.02 seconds to deallocate network for instance. [ 1027.151082] env[62096]: DEBUG nova.compute.claims [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1027.151329] env[62096]: DEBUG oslo_concurrency.lockutils [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1027.164749] env[62096]: DEBUG nova.network.neutron [req-33e96ae9-4ff0-4f67-b11b-102a7cc68320 req-d623d6ad-0c0c-4b0d-b0a8-42db825220a5 service nova] [instance: da512c60-0b6e-434c-a049-2806292f698a] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1027.225681] env[62096]: DEBUG nova.compute.resource_tracker [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62096) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1027.225868] env[62096]: DEBUG oslo_concurrency.lockutils [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.150s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1027.226117] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 1.851s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1027.227904] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg 459a455ee8e84579a0ae65276ed9ba5d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1027.260522] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 459a455ee8e84579a0ae65276ed9ba5d [ 1027.267539] env[62096]: DEBUG nova.network.neutron [req-33e96ae9-4ff0-4f67-b11b-102a7cc68320 req-d623d6ad-0c0c-4b0d-b0a8-42db825220a5 service nova] [instance: da512c60-0b6e-434c-a049-2806292f698a] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1027.268023] env[62096]: INFO oslo_messaging._drivers.amqpdriver [req-33e96ae9-4ff0-4f67-b11b-102a7cc68320 req-d623d6ad-0c0c-4b0d-b0a8-42db825220a5 service nova] Expecting reply to msg 691af633d2d64a6aa88513b2ee670dbf in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1027.276876] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 691af633d2d64a6aa88513b2ee670dbf [ 1027.481730] env[62096]: DEBUG oslo_concurrency.lockutils [req-0f32f300-526d-4ac2-b2fd-35007b8a01d5 req-3ed336ab-d415-4115-b68c-a6db139d1b78 service nova] Releasing lock "refresh_cache-917b3988-d6e0-40cd-b106-f7e8b6be6c61" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1027.481975] env[62096]: DEBUG nova.compute.manager [req-0f32f300-526d-4ac2-b2fd-35007b8a01d5 req-3ed336ab-d415-4115-b68c-a6db139d1b78 service nova] [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] Received event network-vif-deleted-ee44dade-05aa-416d-a802-0501161cc1dd {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 1027.769686] env[62096]: DEBUG oslo_concurrency.lockutils [req-33e96ae9-4ff0-4f67-b11b-102a7cc68320 req-d623d6ad-0c0c-4b0d-b0a8-42db825220a5 service nova] Releasing lock "refresh_cache-da512c60-0b6e-434c-a049-2806292f698a" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1027.770182] env[62096]: DEBUG oslo_concurrency.lockutils [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 
tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Acquired lock "refresh_cache-da512c60-0b6e-434c-a049-2806292f698a" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.770246] env[62096]: DEBUG nova.network.neutron [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: da512c60-0b6e-434c-a049-2806292f698a] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1027.770712] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg 5a03a39c750a4eda8fb64b54eaa7f1fd in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1027.777551] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5a03a39c750a4eda8fb64b54eaa7f1fd [ 1027.782755] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffcaba4f-bad9-4366-9959-39362722f6d7 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.791836] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-878948a7-474a-489b-99c5-2ead462af271 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.820335] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58e62ba7-6e85-400f-b2c4-9583333889bd {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.826835] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-139a7472-5ae8-4392-b5ed-9c0993432da6 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.838934] env[62096]: DEBUG nova.compute.provider_tree [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1027.839408] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg 2b5c565bc65f47da85a82b9299c8a531 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1027.845663] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2b5c565bc65f47da85a82b9299c8a531 [ 1028.228781] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62096) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1028.229022] env[62096]: DEBUG oslo_service.periodic_task [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62096) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1028.229173] env[62096]: DEBUG nova.compute.manager [None req-ee44eb2c-10cc-43ee-97ad-198a14f94214 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62096) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10545}} [ 1028.288439] env[62096]: DEBUG nova.network.neutron [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: da512c60-0b6e-434c-a049-2806292f698a] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1028.342316] env[62096]: DEBUG nova.scheduler.client.report [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1028.344951] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg 294e9887e5414318842bd325bf8a4de6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1028.355145] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 294e9887e5414318842bd325bf8a4de6 [ 1028.361717] env[62096]: DEBUG nova.network.neutron [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: da512c60-0b6e-434c-a049-2806292f698a] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1028.362144] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg 2445c644a0e2471b8902298356ab355a in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1028.368898] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2445c644a0e2471b8902298356ab355a [ 1028.672915] env[62096]: DEBUG nova.compute.manager [req-aee8225d-13d8-44ae-bf89-3f4a7b2bcbed req-e501c53b-bede-42f7-81fe-52a7752d039e service nova] [instance: da512c60-0b6e-434c-a049-2806292f698a] Received event network-vif-deleted-3c85207e-896f-41e7-a6d8-8a1ccd6962e3 {{(pid=62096) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 1028.847371] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.621s {{(pid=62096) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1028.847953] env[62096]: ERROR nova.compute.manager [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 9214896b-77d0-4e77-92ff-fc17784701e2, please check neutron logs for more information. [ 1028.847953] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] Traceback (most recent call last): [ 1028.847953] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1028.847953] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] self.driver.spawn(context, instance, image_meta, [ 1028.847953] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1028.847953] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1028.847953] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1028.847953] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] vm_ref = self.build_virtual_machine(instance, [ 1028.847953] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1028.847953] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] vif_infos = vmwarevif.get_vif_info(self._session, [ 1028.847953] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1028.848233] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] for vif in network_info: [ 1028.848233] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 1028.848233] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] return self._sync_wrapper(fn, *args, **kwargs) [ 1028.848233] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 1028.848233] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] self.wait() [ 1028.848233] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 1028.848233] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] self[:] = self._gt.wait() [ 1028.848233] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1028.848233] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] return 
self._exit_event.wait() [ 1028.848233] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 1028.848233] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] current.throw(*self._exc) [ 1028.848233] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1028.848233] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] result = function(*args, **kwargs) [ 1028.848485] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 1028.848485] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] return func(*args, **kwargs) [ 1028.848485] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 1028.848485] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] raise e [ 1028.848485] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1028.848485] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] nwinfo = self.network_api.allocate_for_instance( [ 1028.848485] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 1028.848485] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] created_port_ids = self._update_ports_for_instance( [ 1028.848485] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 1028.848485] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] with excutils.save_and_reraise_exception(): [ 1028.848485] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1028.848485] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] self.force_reraise() [ 1028.848485] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1028.848731] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] raise self.value [ 1028.848731] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 1028.848731] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] updated_port = self._update_port( [ 1028.848731] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1028.848731] env[62096]: ERROR nova.compute.manager [instance: 
b14755cc-d2c8-4985-ac38-936fe3b83a34] _ensure_no_port_binding_failure(port) [ 1028.848731] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1028.848731] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] raise exception.PortBindingFailed(port_id=port['id']) [ 1028.848731] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] nova.exception.PortBindingFailed: Binding failed for port 9214896b-77d0-4e77-92ff-fc17784701e2, please check neutron logs for more information. [ 1028.848731] env[62096]: ERROR nova.compute.manager [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] [ 1028.848731] env[62096]: DEBUG nova.compute.utils [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] Binding failed for port 9214896b-77d0-4e77-92ff-fc17784701e2, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1028.849919] env[62096]: DEBUG oslo_concurrency.lockutils [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 1.699s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1028.851719] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg 173d61e6d1cd4751935a62ee618e3b03 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1028.853223] env[62096]: DEBUG nova.compute.manager [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] Build of instance b14755cc-d2c8-4985-ac38-936fe3b83a34 was re-scheduled: Binding failed for port 9214896b-77d0-4e77-92ff-fc17784701e2, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 1028.853721] env[62096]: DEBUG nova.compute.manager [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 1028.853960] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Acquiring lock "refresh_cache-b14755cc-d2c8-4985-ac38-936fe3b83a34" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1028.854193] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Acquired lock "refresh_cache-b14755cc-d2c8-4985-ac38-936fe3b83a34" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.854356] env[62096]: DEBUG nova.network.neutron [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1028.854719] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg 0d32f8c338b14265ae393eb7e74d6bfb in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1028.863938] env[62096]: DEBUG oslo_concurrency.lockutils [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Releasing lock "refresh_cache-da512c60-0b6e-434c-a049-2806292f698a" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1028.864344] env[62096]: DEBUG nova.compute.manager [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: da512c60-0b6e-434c-a049-2806292f698a] Start destroying the instance on the hypervisor. 
{{(pid=62096) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1028.864532] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: da512c60-0b6e-434c-a049-2806292f698a] Destroying instance {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1028.864803] env[62096]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d903cc7d-e2d2-4a5d-a966-8ab9f896ee24 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.873466] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9a0abc2-6ae1-45d6-829f-0338edbd8b85 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.896479] env[62096]: WARNING nova.virt.vmwareapi.vmops [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: da512c60-0b6e-434c-a049-2806292f698a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance da512c60-0b6e-434c-a049-2806292f698a could not be found. [ 1028.896706] env[62096]: DEBUG nova.virt.vmwareapi.vmops [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: da512c60-0b6e-434c-a049-2806292f698a] Instance destroyed {{(pid=62096) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1028.896884] env[62096]: INFO nova.compute.manager [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: da512c60-0b6e-434c-a049-2806292f698a] Took 0.03 seconds to destroy the instance on the hypervisor. [ 1028.897122] env[62096]: DEBUG oslo.service.loopingcall [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62096) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1028.897700] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 173d61e6d1cd4751935a62ee618e3b03 [ 1028.898179] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0d32f8c338b14265ae393eb7e74d6bfb [ 1028.898505] env[62096]: DEBUG nova.compute.manager [-] [instance: da512c60-0b6e-434c-a049-2806292f698a] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1028.898624] env[62096]: DEBUG nova.network.neutron [-] [instance: da512c60-0b6e-434c-a049-2806292f698a] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1028.912724] env[62096]: DEBUG nova.network.neutron [-] [instance: da512c60-0b6e-434c-a049-2806292f698a] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1028.913179] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 58b566a7ac3a47a885752cad8b154c86 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1028.918681] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 58b566a7ac3a47a885752cad8b154c86 [ 1029.380123] env[62096]: DEBUG nova.network.neutron [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1029.414566] env[62096]: DEBUG nova.network.neutron [-] [instance: da512c60-0b6e-434c-a049-2806292f698a] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.414997] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 3984575365294447a1f03ef32401ae78 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1029.422102] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f0a52a2-35d0-429b-aa81-27b5b59ab40e {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.425113] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3984575365294447a1f03ef32401ae78 [ 1029.430695] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aee95a8-2a68-47ff-9229-a9cce8b9af29 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.460623] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10010d80-a3fd-402f-9abb-eddd826b1ec4 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.467489] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b73800f-3459-428b-b8de-23847f31ff1a {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.480436] env[62096]: DEBUG nova.compute.provider_tree [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1029.480923] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg 4dc6380e14494d8a8efc00e966854c85 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1029.482231] env[62096]: DEBUG nova.network.neutron [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.482665] 
env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg c92f4294c8a9470892442746e14b370d in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1029.487631] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4dc6380e14494d8a8efc00e966854c85 [ 1029.489557] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c92f4294c8a9470892442746e14b370d [ 1029.917212] env[62096]: INFO nova.compute.manager [-] [instance: da512c60-0b6e-434c-a049-2806292f698a] Took 1.02 seconds to deallocate network for instance. [ 1029.919550] env[62096]: DEBUG nova.compute.claims [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: da512c60-0b6e-434c-a049-2806292f698a] Aborting claim: {{(pid=62096) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1029.919720] env[62096]: DEBUG oslo_concurrency.lockutils [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1029.984844] env[62096]: DEBUG nova.scheduler.client.report [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1029.987209] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg 59a3945f393e48479809020e3a5184ce in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1029.988207] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Releasing lock "refresh_cache-b14755cc-d2c8-4985-ac38-936fe3b83a34" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1029.988409] env[62096]: DEBUG nova.compute.manager [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 1029.988586] env[62096]: DEBUG nova.compute.manager [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1029.988751] env[62096]: DEBUG nova.network.neutron [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1029.997247] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 59a3945f393e48479809020e3a5184ce [ 1030.003212] env[62096]: DEBUG nova.network.neutron [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1030.003829] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg 4b6181dc524b4fd18bef0032c9f0537e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1030.009679] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4b6181dc524b4fd18bef0032c9f0537e [ 1030.492055] env[62096]: DEBUG oslo_concurrency.lockutils [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.641s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1030.492055] env[62096]: ERROR nova.compute.manager [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ee44dade-05aa-416d-a802-0501161cc1dd, please check neutron logs for more information. 
[ 1030.492055] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] Traceback (most recent call last): [ 1030.492055] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1030.492055] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] self.driver.spawn(context, instance, image_meta, [ 1030.492055] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1030.492055] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1030.492055] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1030.492055] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] vm_ref = self.build_virtual_machine(instance, [ 1030.492341] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1030.492341] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] vif_infos = vmwarevif.get_vif_info(self._session, [ 1030.492341] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1030.492341] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] for vif in network_info: [ 1030.492341] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 1030.492341] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] return self._sync_wrapper(fn, *args, **kwargs) [ 1030.492341] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 1030.492341] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] self.wait() [ 1030.492341] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 1030.492341] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] self[:] = self._gt.wait() [ 1030.492341] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1030.492341] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] return self._exit_event.wait() [ 1030.492341] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 1030.492605] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] current.throw(*self._exc) [ 1030.492605] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", 
line 265, in main [ 1030.492605] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] result = function(*args, **kwargs) [ 1030.492605] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 1030.492605] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] return func(*args, **kwargs) [ 1030.492605] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 1030.492605] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] raise e [ 1030.492605] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1030.492605] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] nwinfo = self.network_api.allocate_for_instance( [ 1030.492605] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 1030.492605] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] created_port_ids = self._update_ports_for_instance( [ 1030.492605] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 1030.492605] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] with excutils.save_and_reraise_exception(): [ 1030.492920] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1030.492920] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] self.force_reraise() [ 1030.492920] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1030.492920] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] raise self.value [ 1030.492920] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 1030.492920] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] updated_port = self._update_port( [ 1030.492920] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1030.492920] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] _ensure_no_port_binding_failure(port) [ 1030.492920] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1030.492920] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] raise exception.PortBindingFailed(port_id=port['id']) [ 1030.492920] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] 
nova.exception.PortBindingFailed: Binding failed for port ee44dade-05aa-416d-a802-0501161cc1dd, please check neutron logs for more information. [ 1030.492920] env[62096]: ERROR nova.compute.manager [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] [ 1030.493175] env[62096]: DEBUG nova.compute.utils [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] Binding failed for port ee44dade-05aa-416d-a802-0501161cc1dd, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1030.493966] env[62096]: DEBUG oslo_concurrency.lockutils [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.574s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1030.495742] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg 255f1410ae9945718142c9ac6d1b183f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1030.497195] env[62096]: DEBUG nova.compute.manager [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] Build of instance 917b3988-d6e0-40cd-b106-f7e8b6be6c61 was re-scheduled: Binding failed for port ee44dade-05aa-416d-a802-0501161cc1dd, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 1030.497195] env[62096]: DEBUG nova.compute.manager [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 1030.497393] env[62096]: DEBUG oslo_concurrency.lockutils [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Acquiring lock "refresh_cache-917b3988-d6e0-40cd-b106-f7e8b6be6c61" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1030.497528] env[62096]: DEBUG oslo_concurrency.lockutils [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Acquired lock "refresh_cache-917b3988-d6e0-40cd-b106-f7e8b6be6c61" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1030.497686] env[62096]: DEBUG nova.network.neutron [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1030.498036] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg 90e15e15781043d99887d732688a08f3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1030.506007] env[62096]: DEBUG nova.network.neutron [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1030.506396] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg 488890f574744180891acb240e78f586 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1030.512101] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 90e15e15781043d99887d732688a08f3 [ 1030.523097] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 488890f574744180891acb240e78f586 [ 1030.528449] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 255f1410ae9945718142c9ac6d1b183f [ 1031.009838] env[62096]: INFO nova.compute.manager [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: b14755cc-d2c8-4985-ac38-936fe3b83a34] Took 1.02 seconds to deallocate network for instance. 
[ 1031.011419] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg 9d3a8745ea4646b3b82c0a8ea7cfc0bd in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1031.024570] env[62096]: DEBUG nova.network.neutron [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1031.044077] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9d3a8745ea4646b3b82c0a8ea7cfc0bd [ 1031.048802] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8622a162-bddc-4ed4-a6e0-599e1603bf2c {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.056277] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3fbad5b-6b00-439f-86a5-a38bf95c8072 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.085208] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-472a4d59-ddb8-4215-8082-2553b5fbd6eb {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.091684] env[62096]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b54439de-5498-46c1-820b-db8ae5bc24e3 {{(pid=62096) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.095832] env[62096]: DEBUG nova.network.neutron [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1031.096326] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg 5c2927ca11c3487087f5f1de3201f4d0 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1031.105793] env[62096]: DEBUG nova.compute.provider_tree [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Inventory has not changed in ProviderTree for provider: 6eefe13c-ab55-4c03-987f-47a62756c3b3 {{(pid=62096) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1031.106239] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg 75c9fdb37e0b494c8b6871918b82781b in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1031.107176] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5c2927ca11c3487087f5f1de3201f4d0 [ 1031.112762] env[62096]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 75c9fdb37e0b494c8b6871918b82781b [ 1031.516225] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg 09da867988ab47d9bb2c27e26b3bbf15 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1031.544425] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 09da867988ab47d9bb2c27e26b3bbf15 [ 1031.598687] env[62096]: DEBUG oslo_concurrency.lockutils [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Releasing lock "refresh_cache-917b3988-d6e0-40cd-b106-f7e8b6be6c61" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1031.598880] env[62096]: DEBUG nova.compute.manager [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 1031.599066] env[62096]: DEBUG nova.compute.manager [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1031.599234] env[62096]: DEBUG nova.network.neutron [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1031.608879] env[62096]: DEBUG nova.scheduler.client.report [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Inventory has not changed for provider 6eefe13c-ab55-4c03-987f-47a62756c3b3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 127, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62096) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1031.611047] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg af64231c0a4e482fb3a1bbeb6575cdcb in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1031.613254] env[62096]: DEBUG nova.network.neutron [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] Instance cache missing network info. 
{{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1031.613744] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg 36df07f468514aff968014594efb02f3 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1031.619668] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 36df07f468514aff968014594efb02f3 [ 1031.620734] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg af64231c0a4e482fb3a1bbeb6575cdcb [ 1032.038836] env[62096]: INFO nova.scheduler.client.report [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Deleted allocations for instance b14755cc-d2c8-4985-ac38-936fe3b83a34 [ 1032.046400] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg 1fa5d8b3647041d4ac1ef33c0692793e in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1032.060736] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1fa5d8b3647041d4ac1ef33c0692793e [ 1032.113717] env[62096]: DEBUG oslo_concurrency.lockutils [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.620s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.114648] env[62096]: ERROR nova.compute.manager [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: da512c60-0b6e-434c-a049-2806292f698a] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 3c85207e-896f-41e7-a6d8-8a1ccd6962e3, please check neutron logs for more information. 
[ 1032.114648] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] Traceback (most recent call last): [ 1032.114648] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1032.114648] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] self.driver.spawn(context, instance, image_meta, [ 1032.114648] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1032.114648] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1032.114648] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1032.114648] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] vm_ref = self.build_virtual_machine(instance, [ 1032.114648] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1032.114648] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] vif_infos = vmwarevif.get_vif_info(self._session, [ 1032.114648] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1032.114984] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] for vif in network_info: [ 1032.114984] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 1032.114984] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] return self._sync_wrapper(fn, *args, **kwargs) [ 1032.114984] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 1032.114984] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] self.wait() [ 1032.114984] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 1032.114984] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] self[:] = self._gt.wait() [ 1032.114984] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1032.114984] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] return self._exit_event.wait() [ 1032.114984] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 1032.114984] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] current.throw(*self._exc) [ 1032.114984] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", 
line 265, in main [ 1032.114984] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] result = function(*args, **kwargs) [ 1032.115244] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 1032.115244] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] return func(*args, **kwargs) [ 1032.115244] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 1032.115244] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] raise e [ 1032.115244] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1032.115244] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] nwinfo = self.network_api.allocate_for_instance( [ 1032.115244] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 1032.115244] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] created_port_ids = self._update_ports_for_instance( [ 1032.115244] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 1032.115244] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] with excutils.save_and_reraise_exception(): [ 1032.115244] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1032.115244] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] self.force_reraise() [ 1032.115244] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1032.115498] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] raise self.value [ 1032.115498] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 1032.115498] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] updated_port = self._update_port( [ 1032.115498] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1032.115498] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] _ensure_no_port_binding_failure(port) [ 1032.115498] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1032.115498] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] raise exception.PortBindingFailed(port_id=port['id']) [ 1032.115498] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] 
nova.exception.PortBindingFailed: Binding failed for port 3c85207e-896f-41e7-a6d8-8a1ccd6962e3, please check neutron logs for more information. [ 1032.115498] env[62096]: ERROR nova.compute.manager [instance: da512c60-0b6e-434c-a049-2806292f698a] [ 1032.116572] env[62096]: DEBUG nova.compute.utils [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: da512c60-0b6e-434c-a049-2806292f698a] Binding failed for port 3c85207e-896f-41e7-a6d8-8a1ccd6962e3, please check neutron logs for more information. {{(pid=62096) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1032.117948] env[62096]: DEBUG nova.network.neutron [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1032.118502] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg bfd28688828642eb92f77f38be14832f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1032.119493] env[62096]: DEBUG nova.compute.manager [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: da512c60-0b6e-434c-a049-2806292f698a] Build of instance da512c60-0b6e-434c-a049-2806292f698a was re-scheduled: Binding failed for port 3c85207e-896f-41e7-a6d8-8a1ccd6962e3, please check neutron logs for more information. 
{{(pid=62096) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 1032.120060] env[62096]: DEBUG nova.compute.manager [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: da512c60-0b6e-434c-a049-2806292f698a] Unplugging VIFs for instance {{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 1032.120384] env[62096]: DEBUG oslo_concurrency.lockutils [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Acquiring lock "refresh_cache-da512c60-0b6e-434c-a049-2806292f698a" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1032.120626] env[62096]: DEBUG oslo_concurrency.lockutils [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Acquired lock "refresh_cache-da512c60-0b6e-434c-a049-2806292f698a" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.120873] env[62096]: DEBUG nova.network.neutron [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: da512c60-0b6e-434c-a049-2806292f698a] Building network info cache for instance {{(pid=62096) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1032.121319] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg f4fdf42b16be4667bafd32b9f86114aa in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1032.125791] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bfd28688828642eb92f77f38be14832f [ 1032.127135] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f4fdf42b16be4667bafd32b9f86114aa [ 1032.547968] env[62096]: DEBUG oslo_concurrency.lockutils [None req-e617020f-2ceb-4c10-918e-596d52d990fb tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Lock "b14755cc-d2c8-4985-ac38-936fe3b83a34" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.983s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.623863] env[62096]: INFO nova.compute.manager [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: 917b3988-d6e0-40cd-b106-f7e8b6be6c61] Took 1.02 seconds to deallocate network for instance. 
[ 1032.625544] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg f4285ba706aa4a06a508402db03b0a1f in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1032.641592] env[62096]: DEBUG nova.network.neutron [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: da512c60-0b6e-434c-a049-2806292f698a] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1032.660968] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f4285ba706aa4a06a508402db03b0a1f [ 1032.716542] env[62096]: DEBUG nova.network.neutron [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: da512c60-0b6e-434c-a049-2806292f698a] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1032.717101] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg ead780ae66d344539720f4c787a90ef6 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1032.726488] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ead780ae66d344539720f4c787a90ef6 [ 1033.131503] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg d4ec20895cf64deda08669d801aeb917 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1033.160570] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d4ec20895cf64deda08669d801aeb917 [ 1033.219026] env[62096]: DEBUG oslo_concurrency.lockutils [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Releasing lock "refresh_cache-da512c60-0b6e-434c-a049-2806292f698a" {{(pid=62096) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1033.219271] env[62096]: DEBUG nova.compute.manager [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62096) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 1033.219451] env[62096]: DEBUG nova.compute.manager [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: da512c60-0b6e-434c-a049-2806292f698a] Deallocating network for instance {{(pid=62096) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1033.219620] env[62096]: DEBUG nova.network.neutron [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: da512c60-0b6e-434c-a049-2806292f698a] deallocate_for_instance() {{(pid=62096) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1033.233818] env[62096]: DEBUG nova.network.neutron [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: da512c60-0b6e-434c-a049-2806292f698a] Instance cache missing network info. {{(pid=62096) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1033.234382] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg 551fd954163c47e0ae0d912d358bf774 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1033.241405] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 551fd954163c47e0ae0d912d358bf774 [ 1033.652772] env[62096]: INFO nova.scheduler.client.report [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Deleted allocations for instance 917b3988-d6e0-40cd-b106-f7e8b6be6c61 [ 1033.658607] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg ef084001bd7e4be087ade3e3953117cc in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1033.673292] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ef084001bd7e4be087ade3e3953117cc [ 1033.736375] env[62096]: DEBUG nova.network.neutron [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: da512c60-0b6e-434c-a049-2806292f698a] Updating instance_info_cache with network_info: [] {{(pid=62096) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1033.736881] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg 68316ccfdcc043238ea5c5893a2719c5 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1033.744247] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 68316ccfdcc043238ea5c5893a2719c5 [ 1034.160376] env[62096]: DEBUG oslo_concurrency.lockutils [None req-afe1c6c8-0c74-4b85-a76c-a1a1e808bb91 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Lock "917b3988-d6e0-40cd-b106-f7e8b6be6c61" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.351s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.239217] env[62096]: INFO nova.compute.manager [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] [instance: da512c60-0b6e-434c-a049-2806292f698a] Took 1.02 seconds to deallocate network for instance. [ 1034.240951] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg dfd4206a1a124506978fe5c84c8ed339 in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1034.271513] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dfd4206a1a124506978fe5c84c8ed339 [ 1034.745217] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg ab72f7add5a54841b9cc6fce4cad38fb in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1034.772995] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ab72f7add5a54841b9cc6fce4cad38fb [ 1035.264350] env[62096]: INFO nova.scheduler.client.report [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Deleted allocations for instance da512c60-0b6e-434c-a049-2806292f698a [ 1035.270197] env[62096]: INFO oslo_messaging._drivers.amqpdriver [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Expecting reply to msg e0253eb8837f4b56a2762b0990a3d2ba in queue reply_9a3b5ea54f484077b6ce7cc5c6d32988 [ 1035.279712] env[62096]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e0253eb8837f4b56a2762b0990a3d2ba [ 1035.772454] env[62096]: DEBUG oslo_concurrency.lockutils [None req-62b1c688-6095-443a-8c51-bdae69bfccc8 tempest-ListServerFiltersTestJSON-878959284 tempest-ListServerFiltersTestJSON-878959284-project-member] Lock "da512c60-0b6e-434c-a049-2806292f698a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.644s {{(pid=62096) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}