[ 467.981498] env[61995]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=61995) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 467.981858] env[61995]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=61995) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 467.981991] env[61995]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=61995) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 467.982257] env[61995]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 468.070584] env[61995]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=61995) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 468.079607] env[61995]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.009s {{(pid=61995) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 468.122944] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-9c32f4f3-8def-44fc-8df5-ee7ac061fbed None None] Creating reply queue: reply_757213bc08bb49dab178826d88b76f40
[ 468.131365] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-9c32f4f3-8def-44fc-8df5-ee7ac061fbed None None] Expecting reply to msg 59dd7b7df9364b03b9b0efa71cf30df5 in queue reply_757213bc08bb49dab178826d88b76f40
[ 468.144384] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 59dd7b7df9364b03b9b0efa71cf30df5
[ 468.673402] env[61995]: INFO nova.virt.driver [None req-9c32f4f3-8def-44fc-8df5-ee7ac061fbed None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 468.742369] env[61995]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 468.742531] env[61995]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 468.742649] env[61995]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=61995) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 471.833811] env[61995]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-f2f2f635-c3cd-4848-9b2c-d0604198570f {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 471.850738] env[61995]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=61995) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 471.850903] env[61995]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-6f706e95-4282-4e0e-a20c-65d76a038c7b {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 471.881458] env[61995]: INFO oslo_vmware.api [-] Successfully established new session; session ID is ce769.
[ 471.881627] env[61995]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.139s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 471.882176] env[61995]: INFO nova.virt.vmwareapi.driver [None req-9c32f4f3-8def-44fc-8df5-ee7ac061fbed None None] VMware vCenter version: 7.0.3
[ 471.885559] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a6fed97-47a3-4d65-b9eb-0cfa26621485 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 471.902846] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4b5dece-8035-4633-9fc0-853d747eacb7 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 471.908880] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e516537-76cb-40e5-9aea-4e8d59428055 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 471.915180] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9afde62-a411-4ba4-986a-210c9a7f53c9 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 471.928064] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a169cfd-5fc8-4639-bbfc-c0943aa9a0b1 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 471.934308] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fcf83ab-80fd-4fd3-b8f3-45b3fed97d5b {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 471.964411] env[61995]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-bfc20ab2-da6e-4c3a-b241-600fd836316f {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 471.969273] env[61995]: DEBUG nova.virt.vmwareapi.driver [None req-9c32f4f3-8def-44fc-8df5-ee7ac061fbed None None] Extension org.openstack.compute already exists. {{(pid=61995) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:224}}
[ 471.971949] env[61995]: INFO nova.compute.provider_config [None req-9c32f4f3-8def-44fc-8df5-ee7ac061fbed None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 471.972595] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-9c32f4f3-8def-44fc-8df5-ee7ac061fbed None None] Expecting reply to msg 7d674b8a3f0240aa89e7a289f020d08a in queue reply_757213bc08bb49dab178826d88b76f40
[ 471.989133] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7d674b8a3f0240aa89e7a289f020d08a
[ 472.476341] env[61995]: DEBUG nova.context [None req-9c32f4f3-8def-44fc-8df5-ee7ac061fbed None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),cd637005-cac0-4a34-a61c-f7d187c17fab(cell1) {{(pid=61995) load_cells /opt/stack/nova/nova/context.py:464}}
[ 472.478397] env[61995]: DEBUG oslo_concurrency.lockutils [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 472.478621] env[61995]: DEBUG oslo_concurrency.lockutils [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 472.479286] env[61995]: DEBUG oslo_concurrency.lockutils [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 472.479762] env[61995]: DEBUG oslo_concurrency.lockutils [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] Acquiring lock "cd637005-cac0-4a34-a61c-f7d187c17fab" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 472.479961] env[61995]: DEBUG oslo_concurrency.lockutils [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] Lock "cd637005-cac0-4a34-a61c-f7d187c17fab" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 472.480977] env[61995]: DEBUG oslo_concurrency.lockutils [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] Lock "cd637005-cac0-4a34-a61c-f7d187c17fab" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 472.501198] env[61995]: INFO dbcounter [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] Registered counter for database nova_cell0
[ 472.509641] env[61995]: INFO dbcounter [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] Registered counter for database nova_cell1
[ 472.512824] env[61995]: DEBUG oslo_db.sqlalchemy.engines [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61995) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 472.513176] env[61995]: DEBUG oslo_db.sqlalchemy.engines [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61995) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 472.518105] env[61995]: ERROR nova.db.main.api [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 472.518105] env[61995]: result = function(*args, **kwargs)
[ 472.518105] env[61995]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 472.518105] env[61995]: return func(*args, **kwargs)
[ 472.518105] env[61995]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 472.518105] env[61995]: result = fn(*args, **kwargs)
[ 472.518105] env[61995]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 472.518105] env[61995]: return f(*args, **kwargs)
[ 472.518105] env[61995]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 472.518105] env[61995]: return db.service_get_minimum_version(context, binaries)
[ 472.518105] env[61995]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 472.518105] env[61995]: _check_db_access()
[ 472.518105] env[61995]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 472.518105] env[61995]: stacktrace = ''.join(traceback.format_stack())
[ 472.518105] env[61995]:
[ 472.518920] env[61995]: ERROR nova.db.main.api [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 472.518920] env[61995]: result = function(*args, **kwargs)
[ 472.518920] env[61995]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 472.518920] env[61995]: return func(*args, **kwargs)
[ 472.518920] env[61995]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 472.518920] env[61995]: result = fn(*args, **kwargs)
[ 472.518920] env[61995]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 472.518920] env[61995]: return f(*args, **kwargs)
[ 472.518920] env[61995]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 472.518920] env[61995]: return db.service_get_minimum_version(context, binaries)
[ 472.518920] env[61995]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 472.518920] env[61995]: _check_db_access()
[ 472.518920] env[61995]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 472.518920] env[61995]: stacktrace = ''.join(traceback.format_stack())
[ 472.518920] env[61995]:
[ 472.519446] env[61995]: WARNING nova.objects.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 472.519446] env[61995]: WARNING nova.objects.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] Failed to get minimum service version for cell cd637005-cac0-4a34-a61c-f7d187c17fab
[ 472.519840] env[61995]: DEBUG oslo_concurrency.lockutils [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] Acquiring lock "singleton_lock" {{(pid=61995) lock
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 472.519998] env[61995]: DEBUG oslo_concurrency.lockutils [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] Acquired lock "singleton_lock" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 472.520252] env[61995]: DEBUG oslo_concurrency.lockutils [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] Releasing lock "singleton_lock" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 472.520571] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] Full set of CONF: {{(pid=61995) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}} [ 472.520759] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] ******************************************************************************** {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2600}} [ 472.520880] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] Configuration options gathered from: {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2601}} [ 472.520966] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 472.521155] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2603}} [ 472.521284] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] ================================================================================ {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2605}} [ 472.521488] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] allow_resize_to_same_host = True {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.521656] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] arq_binding_timeout = 300 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.521788] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] backdoor_port = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.521915] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] backdoor_socket = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.522087] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] block_device_allocate_retries = 60 {{(pid=61995) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.522294] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] block_device_allocate_retries_interval = 3 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.522474] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cert = self.pem {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.522637] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] clean_snapshot_directory = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.522803] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.522972] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] compute_monitors = [] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.523139] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] config_dir = [] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.523308] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] config_drive_format = iso9660 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.523441] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.523603] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] config_source = [] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.523769] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] console_host = devstack {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.523933] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] control_exchange = nova {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.524134] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cpu_allocation_ratio = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.524299] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] daemon = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.524609] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] debug = True {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.524799] env[61995]: 
DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] default_access_ip_network_name = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.524975] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] default_availability_zone = nova {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.525134] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] default_ephemeral_format = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.525293] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] default_green_pool_size = 1000 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.525532] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.525696] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] default_schedule_zone = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.525850] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] disk_allocation_ratio = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.526010] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] enable_new_services = True {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.526183] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] enabled_apis = ['osapi_compute'] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.526345] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] enabled_ssl_apis = [] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.526501] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] flat_injected = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.526658] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] force_config_drive = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.526854] env[61995]: DEBUG oslo_service.service [None 
req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] force_raw_images = True {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.526986] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] graceful_shutdown_timeout = 5 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.527146] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] heal_instance_info_cache_interval = 60 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.527354] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] host = cpu-1 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.527522] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] initial_cpu_allocation_ratio = 4.0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.527864] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] initial_disk_allocation_ratio = 1.0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.527864] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] initial_ram_allocation_ratio = 1.0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.528061] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.528232] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] instance_build_timeout = 0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.528392] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] instance_delete_interval = 300 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.528557] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] instance_format = [instance: %(uuid)s] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.528723] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] instance_name_template = instance-%08x {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.528883] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] instance_usage_audit = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.529048] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] instance_usage_audit_period = month {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.529211] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f 
None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.529373] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] instances_path = /opt/stack/data/nova/instances {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.529535] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] internal_service_availability_zone = internal {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.529688] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] key = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.529873] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] live_migration_retry_count = 30 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.530040] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] log_color = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.530203] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] log_config_append = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.530369] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.530526] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] log_dir = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.530680] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] log_file = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.530825] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] log_options = True {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.530996] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] log_rotate_interval = 1 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.531162] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] log_rotate_interval_type = days {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.531327] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] log_rotation_type = none {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.531453] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s 
%(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.531575] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.531737] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.531901] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.532037] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.532298] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] long_rpc_timeout = 1800 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.532517] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] max_concurrent_builds = 10 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.532689] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] max_concurrent_live_migrations = 1 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.532850] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] max_concurrent_snapshots = 5 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.533013] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] max_local_block_devices = 3 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.533172] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] max_logfile_count = 30 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.533330] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] max_logfile_size_mb = 200 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.533487] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] maximum_instance_delete_attempts = 5 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.533653] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] metadata_listen = 0.0.0.0 {{(pid=61995) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.533821] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] metadata_listen_port = 8775 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.533993] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] metadata_workers = 2 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.534154] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] migrate_max_retries = -1 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.534317] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] mkisofs_cmd = genisoimage {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.534583] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] my_block_storage_ip = 10.180.1.21 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.534815] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] my_ip = 10.180.1.21 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.534995] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] network_allocate_retries = 0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.535178] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.535347] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] osapi_compute_listen = 0.0.0.0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.535510] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] osapi_compute_listen_port = 8774 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.535677] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] osapi_compute_unique_server_name_scope = {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.535846] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] osapi_compute_workers = 2 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.536019] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] password_length = 12 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.536234] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] periodic_enable = True {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.536400] env[61995]: 
DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] periodic_fuzzy_delay = 60 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.536594] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] pointer_model = usbtablet {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.536767] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] preallocate_images = none {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.536931] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] publish_errors = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.537060] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] pybasedir = /opt/stack/nova {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.537217] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] ram_allocation_ratio = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.537373] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] rate_limit_burst = 0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.537537] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] rate_limit_except_level = CRITICAL {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.537694] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] rate_limit_interval = 0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.537853] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] reboot_timeout = 0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.538012] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] reclaim_instance_interval = 0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.538164] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] record = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.538331] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] reimage_timeout_per_gb = 60 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.538494] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] report_interval = 120 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.538652] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] rescue_timeout = 0 {{(pid=61995) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.538809] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] reserved_host_cpus = 0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.538969] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] reserved_host_disk_mb = 0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.539127] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] reserved_host_memory_mb = 512 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.539285] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] reserved_huge_pages = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.539443] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] resize_confirm_window = 0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.539600] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] resize_fs_using_block_device = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.539772] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] resume_guests_state_on_host_boot = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.539959] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.540138] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] rpc_response_timeout = 60 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.540301] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] run_external_periodic_tasks = True {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.540465] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] running_deleted_instance_action = reap {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.540625] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] running_deleted_instance_poll_interval = 1800 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.540803] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] running_deleted_instance_timeout = 0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.540976] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] scheduler_instance_sync_interval = 120 {{(pid=61995) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.541144] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] service_down_time = 720 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.541311] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] servicegroup_driver = db {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.541473] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] shelved_offload_time = 0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.541664] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] shelved_poll_interval = 3600 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.541833] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] shutdown_timeout = 0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.541994] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] source_is_ipv6 = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.542176] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] ssl_only = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.542426] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.542644] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] sync_power_state_interval = 600 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.542817] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] sync_power_state_pool_size = 1000 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.542991] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] syslog_log_facility = LOG_USER {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.543150] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] tempdir = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.543310] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] timeout_nbd = 10 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.543476] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] transport_url = **** {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.543635] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f 
None None] update_resources_interval = 0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.543796] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] use_cow_images = True {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.543953] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] use_eventlog = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.544130] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] use_journal = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.544289] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] use_json = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.544450] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] use_rootwrap_daemon = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.544669] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] use_stderr = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.544939] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] use_syslog = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.545109] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vcpu_pin_set = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.545282] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vif_plugging_is_fatal = True {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.545453] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vif_plugging_timeout = 300 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.545620] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] virt_mkfs = [] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.545783] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] volume_usage_poll_interval = 0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.545948] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] watch_log_file = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.546114] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] web = /usr/share/spice-html5 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 472.546296] env[61995]: DEBUG oslo_service.service [None 
req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_concurrency.disable_process_locking = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.546592] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.546775] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.546940] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.547109] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_messaging_metrics.metrics_process_name = {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.547275] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.547437] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.547616] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] api.auth_strategy = keystone {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.547784] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] api.compute_link_prefix = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.547955] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.548141] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] api.dhcp_domain = novalocal {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.548311] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] api.enable_instance_password = True {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.548475] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] api.glance_link_prefix = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.548641] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] api.instance_list_cells_batch_fixed_size = 100 
{{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.548815] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] api.instance_list_cells_batch_strategy = distributed {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.548980] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] api.instance_list_per_project_cells = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.549404] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] api.list_records_by_skipping_down_cells = True {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.549582] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] api.local_metadata_per_cell = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.549769] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] api.max_limit = 1000 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.549954] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] api.metadata_cache_expiration = 15 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.550128] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] api.neutron_default_tenant_id = default {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.550298] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] api.response_validation = warn {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.550464] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] api.use_neutron_default_nets = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.550631] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.550812] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] api.vendordata_dynamic_failure_fatal = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.551000] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.551175] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] api.vendordata_dynamic_ssl_certfile = {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.551343] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] 
api.vendordata_dynamic_targets = [] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.551507] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] api.vendordata_jsonfile_path = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.551687] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] api.vendordata_providers = ['StaticJSON'] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.551877] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cache.backend = dogpile.cache.memcached {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.552060] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cache.backend_argument = **** {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.552266] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cache.config_prefix = cache.oslo {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.552444] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cache.dead_timeout = 60.0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.552610] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cache.debug_cache_backend = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.552773] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cache.enable_retry_client = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.552936] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cache.enable_socket_keepalive = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.553105] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cache.enabled = True {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.553268] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cache.enforce_fips_mode = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.553432] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cache.expiration_time = 600 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.553595] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cache.hashclient_retry_attempts = 2 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.553768] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cache.hashclient_retry_delay = 1.0 {{(pid=61995) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.553945] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cache.memcache_dead_retry = 300 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.554101] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cache.memcache_password = **** {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.554263] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.554425] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.554590] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cache.memcache_pool_maxsize = 10 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.554900] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cache.memcache_pool_unused_timeout = 60 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.555105] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cache.memcache_sasl_enabled = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.555293] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cache.memcache_servers = ['localhost:11211'] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.555465] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cache.memcache_socket_timeout = 1.0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.555629] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cache.memcache_username = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.555798] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cache.proxies = [] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.556121] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cache.redis_db = 0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.556314] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cache.redis_password = **** {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.556495] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cache.redis_sentinel_service_name = mymaster {{(pid=61995) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.556677] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.556852] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cache.redis_server = localhost:6379 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.557023] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cache.redis_socket_timeout = 1.0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.557186] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cache.redis_username = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.557352] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cache.retry_attempts = 2 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.557518] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cache.retry_delay = 0.0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.557682] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cache.socket_keepalive_count = 1 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.557847] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cache.socket_keepalive_idle = 1 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.558012] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cache.socket_keepalive_interval = 1 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.558181] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cache.tls_allowed_ciphers = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.558399] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cache.tls_cafile = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.558571] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cache.tls_certfile = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.558737] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cache.tls_enabled = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.558899] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cache.tls_keyfile = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.559074] env[61995]: 
DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cinder.auth_section = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.559250] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cinder.auth_type = password {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.559414] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cinder.cafile = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.559592] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cinder.catalog_info = volumev3::publicURL {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.559772] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cinder.certfile = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.559952] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cinder.collect_timing = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.560132] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cinder.cross_az_attach = True {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.560299] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cinder.debug = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.560458] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cinder.endpoint_template = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.560625] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cinder.http_retries = 3 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.560817] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cinder.insecure = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.560995] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cinder.keyfile = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.561171] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cinder.os_region_name = RegionOne {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.561338] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cinder.split_loggers = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.561499] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cinder.timeout = None {{(pid=61995) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.561668] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.561830] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] compute.cpu_dedicated_set = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.561987] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] compute.cpu_shared_set = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.562176] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] compute.image_type_exclude_list = [] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.562354] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] compute.live_migration_wait_for_vif_plug = True {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.562521] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] compute.max_concurrent_disk_ops = 0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.562684] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] compute.max_disk_devices_to_attach = -1 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.562848] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.563018] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.563181] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] compute.resource_provider_association_refresh = 300 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.563342] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.563503] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] compute.shutdown_retry_interval = 10 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.563680] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.563858] env[61995]: DEBUG oslo_service.service 
[None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] conductor.workers = 2 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.564091] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] console.allowed_origins = [] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.564210] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] console.ssl_ciphers = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.564380] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] console.ssl_minimum_version = default {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.564548] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] consoleauth.enforce_session_timeout = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.564717] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] consoleauth.token_ttl = 600 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.564961] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cyborg.cafile = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.565211] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cyborg.certfile = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.565402] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cyborg.collect_timing = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.565568] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cyborg.connect_retries = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.565731] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cyborg.connect_retry_delay = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.565893] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cyborg.endpoint_override = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.566058] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cyborg.insecure = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.566218] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cyborg.keyfile = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.566377] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cyborg.max_version = None {{(pid=61995) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.566536] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cyborg.min_version = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.566693] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cyborg.region_name = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.566852] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cyborg.retriable_status_codes = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.567012] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cyborg.service_name = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.567181] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cyborg.service_type = accelerator {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.567343] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cyborg.split_loggers = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.567500] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cyborg.status_code_retries = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.567657] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cyborg.status_code_retry_delay = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.567813] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cyborg.timeout = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.567994] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.568168] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] cyborg.version = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.568346] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] database.backend = sqlalchemy {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.568514] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] database.connection = **** {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.568680] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] database.connection_debug = 0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.568849] 
env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] database.connection_parameters = {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.569015] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] database.connection_recycle_time = 3600 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.569175] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] database.connection_trace = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.569335] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] database.db_inc_retry_interval = True {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.569497] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] database.db_max_retries = 20 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.569656] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] database.db_max_retry_interval = 10 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.569850] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] database.db_retry_interval = 1 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.570027] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] database.max_overflow = 50 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.570191] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] database.max_pool_size = 5 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.570351] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] database.max_retries = 10 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.570521] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] database.mysql_sql_mode = TRADITIONAL {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.570680] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] database.mysql_wsrep_sync_wait = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.570863] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] database.pool_timeout = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.571032] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] database.retry_interval = 10 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.571191] env[61995]: DEBUG oslo_service.service [None 
req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] database.slave_connection = **** {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.571352] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] database.sqlite_synchronous = True {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.571512] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] database.use_db_reconnect = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.571690] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] api_database.backend = sqlalchemy {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.571858] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] api_database.connection = **** {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.572061] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] api_database.connection_debug = 0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.572258] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] api_database.connection_parameters = {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.572427] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] api_database.connection_recycle_time = 3600 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.572590] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] api_database.connection_trace = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.572754] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] api_database.db_inc_retry_interval = True {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.572921] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] api_database.db_max_retries = 20 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.573082] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] api_database.db_max_retry_interval = 10 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.573243] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] api_database.db_retry_interval = 1 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.573406] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] api_database.max_overflow = 50 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.573568] env[61995]: DEBUG oslo_service.service [None 
req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] api_database.max_pool_size = 5 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.573729] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] api_database.max_retries = 10 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.573902] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.574062] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] api_database.mysql_wsrep_sync_wait = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.574219] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] api_database.pool_timeout = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.574382] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] api_database.retry_interval = 10 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.574539] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] api_database.slave_connection = **** {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.574698] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] api_database.sqlite_synchronous = True {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.574872] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] devices.enabled_mdev_types = [] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.575137] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.575399] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] ephemeral_storage_encryption.default_format = luks {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.575578] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] ephemeral_storage_encryption.enabled = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.575748] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] ephemeral_storage_encryption.key_size = 512 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.575922] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] glance.api_servers = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.576099] env[61995]: DEBUG 
oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] glance.cafile = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.576324] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] glance.certfile = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.576424] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] glance.collect_timing = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.576582] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] glance.connect_retries = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.576739] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] glance.connect_retry_delay = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.576901] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] glance.debug = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.577063] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] glance.default_trusted_certificate_ids = [] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.577222] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] glance.enable_certificate_validation = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.577382] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] glance.enable_rbd_download = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.577538] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] glance.endpoint_override = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.577700] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] glance.insecure = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.577857] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] glance.keyfile = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.578013] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] glance.max_version = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.578166] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] glance.min_version = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.578326] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] glance.num_retries = 3 {{(pid=61995) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.578492] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] glance.rbd_ceph_conf = {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.578652] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] glance.rbd_connect_timeout = 5 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.578818] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] glance.rbd_pool = {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.578985] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] glance.rbd_user = {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.579140] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] glance.region_name = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.579298] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] glance.retriable_status_codes = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.579453] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] glance.service_name = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.579617] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] glance.service_type = image {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.579845] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] glance.split_loggers = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.580086] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] glance.status_code_retries = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.580187] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] glance.status_code_retry_delay = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.580344] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] glance.timeout = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.580531] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.580693] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] glance.verify_glance_signatures = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.580949] 
env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] glance.version = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.581127] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] guestfs.debug = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.581296] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] mks.enabled = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.581648] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.581838] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] image_cache.manager_interval = 2400 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.582013] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] image_cache.precache_concurrency = 1 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.582216] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] image_cache.remove_unused_base_images = True {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.582398] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.582570] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.582749] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] image_cache.subdirectory_name = _base {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.582928] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] ironic.api_max_retries = 60 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.583095] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] ironic.api_retry_interval = 2 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.583256] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] ironic.auth_section = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.583417] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] ironic.auth_type = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.583575] env[61995]: DEBUG 
oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] ironic.cafile = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.583736] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] ironic.certfile = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.583901] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] ironic.collect_timing = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.584079] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] ironic.conductor_group = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.584242] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] ironic.connect_retries = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.584401] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] ironic.connect_retry_delay = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.584560] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] ironic.endpoint_override = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.584720] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] ironic.insecure = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.584877] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] ironic.keyfile = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.585106] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] ironic.max_version = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.585283] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] ironic.min_version = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.585540] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] ironic.peer_list = [] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.585712] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] ironic.region_name = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.585876] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] ironic.retriable_status_codes = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.586045] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] ironic.serial_console_state_timeout = 10 {{(pid=61995) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.586205] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] ironic.service_name = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.586375] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] ironic.service_type = baremetal {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.586537] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] ironic.shard = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.586701] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] ironic.split_loggers = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.586861] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] ironic.status_code_retries = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.587022] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] ironic.status_code_retry_delay = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.587180] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] ironic.timeout = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.587362] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.587522] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] ironic.version = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.587703] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.587876] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] key_manager.fixed_key = **** {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.588071] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.588238] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] barbican.barbican_api_version = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.588397] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] barbican.barbican_endpoint = None {{(pid=61995) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.588567] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] barbican.barbican_endpoint_type = public {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.588724] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] barbican.barbican_region_name = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.588881] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] barbican.cafile = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.589038] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] barbican.certfile = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.589197] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] barbican.collect_timing = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.589354] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] barbican.insecure = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.589509] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] barbican.keyfile = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.589670] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] barbican.number_of_retries = 60 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.589862] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] barbican.retry_delay = 1 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.590036] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] barbican.send_service_user_token = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.590200] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] barbican.split_loggers = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.590358] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] barbican.timeout = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.590517] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] barbican.verify_ssl = True {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.590675] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] barbican.verify_ssl_path = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.590878] env[61995]: DEBUG 
oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] barbican_service_user.auth_section = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.591056] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] barbican_service_user.auth_type = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.591213] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] barbican_service_user.cafile = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.591370] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] barbican_service_user.certfile = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.591532] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] barbican_service_user.collect_timing = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.591689] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] barbican_service_user.insecure = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.591844] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] barbican_service_user.keyfile = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.592011] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] barbican_service_user.split_loggers = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.592201] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] barbican_service_user.timeout = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.592372] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vault.approle_role_id = **** {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.592533] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vault.approle_secret_id = **** {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.592705] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vault.kv_mountpoint = secret {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.592863] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vault.kv_path = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.593029] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vault.kv_version = 2 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.593186] env[61995]: DEBUG oslo_service.service [None 
req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vault.namespace = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.593343] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vault.root_token_id = **** {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.593499] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vault.ssl_ca_crt_file = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.593661] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vault.timeout = 60.0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.593821] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vault.use_ssl = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.593987] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.594154] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] keystone.auth_section = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.594315] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] keystone.auth_type = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.594471] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] keystone.cafile = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.594628] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] keystone.certfile = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.594789] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] keystone.collect_timing = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.594948] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] keystone.connect_retries = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.595167] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] keystone.connect_retry_delay = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.595354] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] keystone.endpoint_override = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.595602] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] keystone.insecure = False {{(pid=61995) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.595775] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] keystone.keyfile = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.595938] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] keystone.max_version = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.596107] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] keystone.min_version = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.596269] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] keystone.region_name = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.596427] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] keystone.retriable_status_codes = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.596586] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] keystone.service_name = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.596755] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] keystone.service_type = identity {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.596918] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] keystone.split_loggers = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.597076] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] keystone.status_code_retries = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.597234] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] keystone.status_code_retry_delay = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.597390] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] keystone.timeout = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.597568] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.597730] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] keystone.version = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.597930] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.connection_uri = {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.598090] 
env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.cpu_mode = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.598254] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.cpu_model_extra_flags = [] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.598476] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.cpu_models = [] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.598671] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.cpu_power_governor_high = performance {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.598848] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.cpu_power_governor_low = powersave {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.599017] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.cpu_power_management = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.599191] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.599355] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.device_detach_attempts = 8 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.599519] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.device_detach_timeout = 20 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.599687] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.disk_cachemodes = [] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.599870] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.disk_prefix = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.600055] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.enabled_perf_events = [] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.600227] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.file_backed_memory = 0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.600395] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.gid_maps = [] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.600556] env[61995]: DEBUG oslo_service.service [None 
req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.hw_disk_discard = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.600715] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.hw_machine_type = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.600884] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.images_rbd_ceph_conf = {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.601050] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.601213] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.601378] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.images_rbd_glance_store_name = {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.601543] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.images_rbd_pool = rbd {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.601709] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.images_type = default {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.601867] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.images_volume_group = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.602028] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.inject_key = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.602213] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.inject_partition = -2 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.602377] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.inject_password = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.602539] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.iscsi_iface = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.602698] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.iser_use_multipath = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.602863] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] 
libvirt.live_migration_bandwidth = 0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.603030] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.live_migration_completion_timeout = 800 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.603192] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.live_migration_downtime = 500 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.603356] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.live_migration_downtime_delay = 75 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.603516] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.live_migration_downtime_steps = 10 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.603677] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.live_migration_inbound_addr = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.603841] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.live_migration_permit_auto_converge = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.604037] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.live_migration_permit_post_copy = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.604189] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.live_migration_scheme = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.604366] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.live_migration_timeout_action = abort {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.604529] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.live_migration_tunnelled = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.604687] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.live_migration_uri = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.604849] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.live_migration_with_native_tls = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.605008] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.max_queues = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.605172] env[61995]: DEBUG oslo_service.service [None 
req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.mem_stats_period_seconds = 10 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.605484] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.605743] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.nfs_mount_options = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.606399] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.606592] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.num_aoe_discover_tries = 3 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.606768] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.num_iser_scan_tries = 5 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.606935] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.num_memory_encrypted_guests = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.607108] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.num_nvme_discover_tries = 5 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.607273] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.num_pcie_ports = 0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.607440] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.num_volume_scan_tries = 5 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.607606] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.pmem_namespaces = [] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.607768] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.quobyte_client_cfg = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.608128] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.608316] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.rbd_connect_timeout = 5 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.608488] env[61995]: DEBUG 
oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.608656] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.608821] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.rbd_secret_uuid = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.609048] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.rbd_user = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.609230] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.realtime_scheduler_priority = 1 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.609407] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.remote_filesystem_transport = ssh {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.609573] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.rescue_image_id = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.609743] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.rescue_kernel_id = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.609930] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.rescue_ramdisk_id = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.610152] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.rng_dev_path = /dev/urandom {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.610323] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.rx_queue_size = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.610511] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.smbfs_mount_options = {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.610814] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.611002] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.snapshot_compression = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.611168] env[61995]: DEBUG 
oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.snapshot_image_format = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.611388] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.611558] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.sparse_logical_volumes = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.611725] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.swtpm_enabled = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.611896] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.swtpm_group = tss {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.612092] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.swtpm_user = tss {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.612292] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.sysinfo_serial = unique {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.612458] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.tb_cache_size = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.612620] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.tx_queue_size = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.612788] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.uid_maps = [] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.612957] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.use_virtio_for_bridges = True {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.613130] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.virt_type = kvm {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.613304] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.volume_clear = zero {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.613471] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.volume_clear_size = 0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.613640] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f 
None None] libvirt.volume_use_multipath = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.613804] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.vzstorage_cache_path = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.613975] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.614146] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.vzstorage_mount_group = qemu {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.614311] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.vzstorage_mount_opts = [] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.614481] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.614759] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.614940] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.vzstorage_mount_user = stack {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.615109] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.615347] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] neutron.auth_section = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.615548] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] neutron.auth_type = password {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.615802] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] neutron.cafile = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.615985] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] neutron.certfile = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.616170] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] neutron.collect_timing = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.616337] env[61995]: DEBUG oslo_service.service [None 
req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] neutron.connect_retries = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.616500] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] neutron.connect_retry_delay = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.616673] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] neutron.default_floating_pool = public {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.616834] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] neutron.endpoint_override = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.616999] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] neutron.extension_sync_interval = 600 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.617165] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] neutron.http_retries = 3 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.617327] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] neutron.insecure = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.617489] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] neutron.keyfile = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.617648] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] neutron.max_version = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.617823] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] neutron.metadata_proxy_shared_secret = **** {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.617983] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] neutron.min_version = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.618150] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] neutron.ovs_bridge = br-int {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.618317] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] neutron.physnets = [] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.618486] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] neutron.region_name = RegionOne {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.618645] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] neutron.retriable_status_codes = None 
{{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.618814] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] neutron.service_metadata_proxy = True {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.618981] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] neutron.service_name = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.619150] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] neutron.service_type = network {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.619312] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] neutron.split_loggers = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.619470] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] neutron.status_code_retries = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.619626] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] neutron.status_code_retry_delay = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.619822] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] neutron.timeout = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.620028] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.620199] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] neutron.version = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.620376] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] notifications.bdms_in_notifications = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.620554] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] notifications.default_level = INFO {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.620733] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] notifications.notification_format = unversioned {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.620924] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] notifications.notify_on_state_change = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.621107] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] notifications.versioned_notifications_topics = 
['versioned_notifications'] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.621283] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] pci.alias = [] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.621455] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] pci.device_spec = [] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.621621] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] pci.report_in_placement = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.621791] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] placement.auth_section = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.621963] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] placement.auth_type = password {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.622178] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] placement.auth_url = http://10.180.1.21/identity {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.622358] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] placement.cafile = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.622519] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] placement.certfile = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.622684] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] placement.collect_timing = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.622842] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] placement.connect_retries = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.623005] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] placement.connect_retry_delay = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.623201] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] placement.default_domain_id = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.623390] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] placement.default_domain_name = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.623554] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] placement.domain_id = None {{(pid=61995) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.623713] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] placement.domain_name = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.623874] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] placement.endpoint_override = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.624047] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] placement.insecure = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.624210] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] placement.keyfile = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.624366] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] placement.max_version = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.624521] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] placement.min_version = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.624690] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] placement.password = **** {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.624848] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] placement.project_domain_id = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.625014] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] placement.project_domain_name = Default {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.625181] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] placement.project_id = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.625352] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] placement.project_name = service {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.625603] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] placement.region_name = RegionOne {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.625851] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] placement.retriable_status_codes = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.626038] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] placement.service_name = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.626221] 
env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] placement.service_type = placement {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.626390] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] placement.split_loggers = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.626552] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] placement.status_code_retries = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.626712] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] placement.status_code_retry_delay = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.626874] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] placement.system_scope = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.627033] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] placement.timeout = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.627190] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] placement.trust_id = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.627347] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] placement.user_domain_id = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.627516] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] placement.user_domain_name = Default {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.627675] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] placement.user_id = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.627849] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] placement.username = nova {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.628064] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.628208] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] placement.version = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.628387] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] quota.cores = 20 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.628552] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None 
None] quota.count_usage_from_placement = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.628725] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.628894] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] quota.injected_file_content_bytes = 10240 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.629063] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] quota.injected_file_path_length = 255 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.629230] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] quota.injected_files = 5 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.629398] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] quota.instances = 10 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.629563] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] quota.key_pairs = 100 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.629728] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] quota.metadata_items = 128 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.629928] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] quota.ram = 51200 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.630099] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] quota.recheck_quota = True {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.630270] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] quota.server_group_members = 10 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.630436] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] quota.server_groups = 10 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.630611] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.630795] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.630974] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] scheduler.image_metadata_prefilter = False {{(pid=61995) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.631138] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.631305] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] scheduler.max_attempts = 3 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.631469] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] scheduler.max_placement_results = 1000 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.631635] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.631797] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] scheduler.query_placement_for_image_type_support = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.631958] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.632183] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] scheduler.workers = 2 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.632376] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.632551] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.632729] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.632900] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.633068] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.633234] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.633396] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.633583] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.633754] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] filter_scheduler.host_subset_size = 1 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.633918] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.634078] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] filter_scheduler.image_properties_default_architecture = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.634239] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.634404] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] filter_scheduler.isolated_hosts = [] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.634567] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] filter_scheduler.isolated_images = [] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.634733] env[61995]: DEBUG oslo_service.service [None 
req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] filter_scheduler.max_instances_per_host = 50 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.634895] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.635061] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.635277] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] filter_scheduler.pci_in_placement = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.635553] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.635742] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.635994] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.636188] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.636422] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.636610] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.636780] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] filter_scheduler.track_instance_changes = True {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.636963] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.637137] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] metrics.required = True {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.637307] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] metrics.weight_multiplier = 1.0 
{{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.637473] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] metrics.weight_of_unavailable = -10000.0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.637643] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] metrics.weight_setting = [] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.637956] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.638132] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] serial_console.enabled = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.638310] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] serial_console.port_range = 10000:20000 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.638483] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.638653] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.638820] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] serial_console.serialproxy_port = 6083 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.638990] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] service_user.auth_section = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.639162] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] service_user.auth_type = password {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.639325] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] service_user.cafile = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.639485] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] service_user.certfile = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.639648] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] service_user.collect_timing = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.639835] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] service_user.insecure = False {{(pid=61995) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.640062] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] service_user.keyfile = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.640199] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] service_user.send_service_user_token = True {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.640365] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] service_user.split_loggers = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.640525] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] service_user.timeout = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.640695] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] spice.agent_enabled = True {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.640918] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] spice.enabled = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.641362] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.641665] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] spice.html5proxy_host = 0.0.0.0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.641948] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] spice.html5proxy_port = 6082 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.642209] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] spice.image_compression = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.642488] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] spice.jpeg_compression = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.642757] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] spice.playback_compression = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.643014] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] spice.server_listen = 127.0.0.1 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.643242] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=61995) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.643412] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] spice.streaming_mode = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.643619] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] spice.zlib_compression = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.643802] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] upgrade_levels.baseapi = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.643977] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] upgrade_levels.compute = auto {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.644153] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] upgrade_levels.conductor = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.644311] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] upgrade_levels.scheduler = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.644493] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vendordata_dynamic_auth.auth_section = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.644660] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vendordata_dynamic_auth.auth_type = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.644818] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vendordata_dynamic_auth.cafile = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.644982] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vendordata_dynamic_auth.certfile = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.645144] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vendordata_dynamic_auth.collect_timing = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.645304] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vendordata_dynamic_auth.insecure = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.645462] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vendordata_dynamic_auth.keyfile = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.645700] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vendordata_dynamic_auth.split_loggers = False {{(pid=61995) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.645880] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vendordata_dynamic_auth.timeout = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.646153] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vmware.api_retry_count = 10 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.646331] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vmware.ca_file = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.646507] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vmware.cache_prefix = devstack-image-cache {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.646677] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vmware.cluster_name = testcl1 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.646844] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vmware.connection_pool_size = 10 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.647007] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vmware.console_delay_seconds = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.647175] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vmware.datastore_regex = ^datastore.* {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.647396] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.647563] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vmware.host_password = **** {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.647735] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vmware.host_port = 443 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.647903] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vmware.host_username = administrator@vsphere.local {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.648085] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vmware.insecure = True {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.648251] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vmware.integration_bridge = None {{(pid=61995) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.648415] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vmware.maximum_objects = 100 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.648573] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vmware.pbm_default_policy = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.648734] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vmware.pbm_enabled = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.648890] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vmware.pbm_wsdl_location = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.649061] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.649219] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vmware.serial_port_proxy_uri = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.649373] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vmware.serial_port_service_uri = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.649539] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vmware.task_poll_interval = 0.5 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.649710] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vmware.use_linked_clone = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.649901] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vmware.vnc_keymap = en-us {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.650073] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vmware.vnc_port = 5900 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.650238] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vmware.vnc_port_total = 10000 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.650422] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vnc.auth_schemes = ['none'] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.650593] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vnc.enabled = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.650928] env[61995]: 
DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.651121] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.651295] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vnc.novncproxy_port = 6080 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.651476] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vnc.server_listen = 127.0.0.1 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.651650] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.651811] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vnc.vencrypt_ca_certs = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.651972] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vnc.vencrypt_client_cert = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.652143] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vnc.vencrypt_client_key = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.652318] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.652484] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] workarounds.disable_deep_image_inspection = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.652647] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] workarounds.disable_fallback_pcpu_query = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.652808] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] workarounds.disable_group_policy_check_upcall = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.652970] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.653132] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] workarounds.disable_rootwrap = False {{(pid=61995) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.653292] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] workarounds.enable_numa_live_migration = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.653451] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.653612] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.653780] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] workarounds.handle_virt_lifecycle_events = True {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.653953] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] workarounds.libvirt_disable_apic = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.654113] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] workarounds.never_download_image_if_on_rbd = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.654274] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.654434] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.654595] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.654754] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.654915] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.655074] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.655232] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
472.655390] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.655553] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.655813] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.656009] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] wsgi.client_socket_timeout = 900 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.656276] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] wsgi.default_pool_size = 1000 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.656452] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] wsgi.keep_alive = True {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.656622] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] wsgi.max_header_line = 16384 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.656788] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] wsgi.secure_proxy_ssl_header = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.656953] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] wsgi.ssl_ca_file = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.657112] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] wsgi.ssl_cert_file = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.657271] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] wsgi.ssl_key_file = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.657435] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] wsgi.tcp_keepidle = 600 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.657607] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.657775] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] zvm.ca_file = None {{(pid=61995) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.657936] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] zvm.cloud_connector_url = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.658225] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.658398] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] zvm.reachable_timeout = 300 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.658579] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_policy.enforce_new_defaults = True {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.658750] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_policy.enforce_scope = True {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.658929] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_policy.policy_default_rule = default {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.659107] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.659280] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_policy.policy_file = policy.yaml {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.659449] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.659610] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.659802] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.659956] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.660132] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.660303] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] 
oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.660477] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.660652] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] profiler.connection_string = messaging:// {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.660836] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] profiler.enabled = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.661018] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] profiler.es_doc_type = notification {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.661185] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] profiler.es_scroll_size = 10000 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.661356] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] profiler.es_scroll_time = 2m {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.661515] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] profiler.filter_error_trace = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.661685] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] profiler.hmac_keys = **** {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.661854] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] profiler.sentinel_service_name = mymaster {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.662021] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] profiler.socket_timeout = 0.1 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.662182] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] profiler.trace_requests = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.662343] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] profiler.trace_sqlalchemy = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.662525] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] profiler_jaeger.process_tags = {} {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.662687] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] 
profiler_jaeger.service_name_prefix = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.662849] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] profiler_otlp.service_name_prefix = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.663016] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] remote_debug.host = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.663177] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] remote_debug.port = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.663353] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.663514] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.663675] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.663837] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.664000] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.664176] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.664337] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.664500] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.664661] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.664830] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_messaging_rabbit.hostname = devstack {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.664988] env[61995]: 
DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.665154] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.665319] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.665486] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.665653] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.665889] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.666074] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.666339] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.666516] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.666681] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.666849] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.667016] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.667180] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.667349] env[61995]: DEBUG oslo_service.service [None 
req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.667512] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.667675] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.667838] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.668009] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.668183] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.668352] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_messaging_rabbit.ssl = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.668523] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.668691] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.668853] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.669024] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.669194] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_messaging_rabbit.ssl_version = {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.669354] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.669539] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=61995) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.669706] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_messaging_notifications.retry = -1 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.669920] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.670105] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_messaging_notifications.transport_url = **** {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.670280] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_limit.auth_section = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.670441] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_limit.auth_type = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.670597] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_limit.cafile = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.670772] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_limit.certfile = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.670956] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_limit.collect_timing = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.671116] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_limit.connect_retries = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.671274] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_limit.connect_retry_delay = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.671433] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_limit.endpoint_id = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.671586] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_limit.endpoint_override = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.671745] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_limit.insecure = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.671903] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_limit.keyfile = None {{(pid=61995) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.672086] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_limit.max_version = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.672251] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_limit.min_version = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.672407] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_limit.region_name = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.672565] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_limit.retriable_status_codes = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.672723] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_limit.service_name = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.672879] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_limit.service_type = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.673044] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_limit.split_loggers = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.673199] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_limit.status_code_retries = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.673354] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_limit.status_code_retry_delay = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.673511] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_limit.timeout = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.673666] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_limit.valid_interfaces = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.673823] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_limit.version = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.673989] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_reports.file_event_handler = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.674152] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_reports.file_event_handler_interval = 1 {{(pid=61995) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.674310] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] oslo_reports.log_dir = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.674476] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.674631] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vif_plug_linux_bridge_privileged.group = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.674787] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.674952] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.675113] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.675269] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vif_plug_linux_bridge_privileged.user = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.675434] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.675591] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vif_plug_ovs_privileged.group = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.675746] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vif_plug_ovs_privileged.helper_command = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.675979] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.676170] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.676413] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] vif_plug_ovs_privileged.user = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.676596] env[61995]: DEBUG oslo_service.service 
[None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] os_vif_linux_bridge.flat_interface = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.676778] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.676951] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.677121] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.677292] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.677458] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.677623] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.677784] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] os_vif_linux_bridge.vlan_interface = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.677963] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.678132] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] os_vif_ovs.isolate_vif = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.678302] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.678466] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.678635] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.678803] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] os_vif_ovs.ovsdb_interface = native {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} 
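The "<group>.<option> = <value>" lines above are oslo.config dumping the effective configuration at service startup via log_opt_values(). A minimal sketch of that mechanism, using a hypothetical option group that mirrors two of the [os_vif_ovs] values shown (the real options are registered by os-vif itself, not by this snippet):

import logging

from oslo_config import cfg

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)
CONF = cfg.CONF

# Hypothetical registration mirroring two values from the log above;
# os-vif registers the real [os_vif_ovs] options.
CONF.register_group(cfg.OptGroup('os_vif_ovs'))
CONF.register_opts([
    cfg.StrOpt('ovsdb_connection', default='tcp:127.0.0.1:6640'),
    cfg.IntOpt('ovs_vsctl_timeout', default=120),
], group='os_vif_ovs')

CONF([], project='demo')                  # resolve defaults / config files
CONF.log_opt_values(LOG, logging.DEBUG)   # emits "os_vif_ovs.ovsdb_connection = ..." lines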
[ 472.678964] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] os_vif_ovs.per_port_bridge = False {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.679128] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] os_brick.lock_path = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.679292] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] privsep_osbrick.capabilities = [21] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.679448] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] privsep_osbrick.group = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.679602] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] privsep_osbrick.helper_command = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.679798] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.679961] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] privsep_osbrick.thread_pool_size = 8 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.680134] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] privsep_osbrick.user = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.680306] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.680462] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] nova_sys_admin.group = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.680615] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] nova_sys_admin.helper_command = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.680797] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.680973] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] nova_sys_admin.thread_pool_size = 8 {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.681129] env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] nova_sys_admin.user = None {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 472.681258] 
env[61995]: DEBUG oslo_service.service [None req-354a1843-8c34-4abe-ae39-4b5a5b357a0f None None] ******************************************************************************** {{(pid=61995) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2624}} [ 472.681682] env[61995]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 472.682522] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-70162dec-4389-4f64-9e7d-32358f404198 None None] Expecting reply to msg 769bb1f1f47740c1b1ccca7d1a5b7272 in queue reply_757213bc08bb49dab178826d88b76f40 [ 472.701100] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 769bb1f1f47740c1b1ccca7d1a5b7272 [ 473.184844] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-70162dec-4389-4f64-9e7d-32358f404198 None None] Getting list of instances from cluster (obj){ [ 473.184844] env[61995]: value = "domain-c8" [ 473.184844] env[61995]: _type = "ClusterComputeResource" [ 473.184844] env[61995]: } {{(pid=61995) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 473.186004] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a6586ee-1011-4002-94a3-e5c34dace307 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 473.195387] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-70162dec-4389-4f64-9e7d-32358f404198 None None] Got total of 0 instances {{(pid=61995) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 473.195985] env[61995]: WARNING nova.virt.vmwareapi.driver [None req-70162dec-4389-4f64-9e7d-32358f404198 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 473.196492] env[61995]: INFO nova.virt.node [None req-70162dec-4389-4f64-9e7d-32358f404198 None None] Generated node identity c68e615f-c0c6-4278-bb7b-a579e68878a7 [ 473.196733] env[61995]: INFO nova.virt.node [None req-70162dec-4389-4f64-9e7d-32358f404198 None None] Wrote node identity c68e615f-c0c6-4278-bb7b-a579e68878a7 to /opt/stack/data/n-cpu-1/compute_id [ 473.197155] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-70162dec-4389-4f64-9e7d-32358f404198 None None] Expecting reply to msg 89f803e5bf57416e90add47b221e4529 in queue reply_757213bc08bb49dab178826d88b76f40 [ 473.211101] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 89f803e5bf57416e90add47b221e4529 [ 473.699356] env[61995]: WARNING nova.compute.manager [None req-70162dec-4389-4f64-9e7d-32358f404198 None None] Compute nodes ['c68e615f-c0c6-4278-bb7b-a579e68878a7'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. 
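Just above, nova.virt.node generates a node identity UUID and persists it to the compute_id file under the service's state directory so the same identity is reused across restarts. A minimal sketch of that write-once pattern, with the path taken from the log and the helper name purely illustrative:

import os
import uuid

def get_or_create_node_identity(state_dir='/opt/stack/data/n-cpu-1'):
    """Return the node identity, creating and persisting it on first start."""
    path = os.path.join(state_dir, 'compute_id')
    if os.path.exists(path):
        with open(path) as f:
            return f.read().strip()
    node_uuid = str(uuid.uuid4())          # e.g. c68e615f-c0c6-4278-bb7b-a579e68878a7
    os.makedirs(state_dir, exist_ok=True)
    with open(path, 'w') as f:
        f.write(node_uuid)
    return node_uuid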
[ 473.700183] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-70162dec-4389-4f64-9e7d-32358f404198 None None] Expecting reply to msg 9566b46f21794bc3a9d852690c5d2f9e in queue reply_757213bc08bb49dab178826d88b76f40 [ 473.724316] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9566b46f21794bc3a9d852690c5d2f9e [ 474.203517] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-70162dec-4389-4f64-9e7d-32358f404198 None None] Expecting reply to msg 1beab70f6bd445528ae95db4966fa852 in queue reply_757213bc08bb49dab178826d88b76f40 [ 474.214729] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1beab70f6bd445528ae95db4966fa852 [ 474.705721] env[61995]: INFO nova.compute.manager [None req-70162dec-4389-4f64-9e7d-32358f404198 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 474.706217] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-70162dec-4389-4f64-9e7d-32358f404198 None None] Expecting reply to msg 4912a906a25c4c159db1406b33bdbd45 in queue reply_757213bc08bb49dab178826d88b76f40 [ 474.717943] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4912a906a25c4c159db1406b33bdbd45 [ 475.209495] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-70162dec-4389-4f64-9e7d-32358f404198 None None] Expecting reply to msg f0207bd50ac041e6a3629f6b8021472f in queue reply_757213bc08bb49dab178826d88b76f40 [ 475.221219] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f0207bd50ac041e6a3629f6b8021472f [ 475.712503] env[61995]: WARNING nova.compute.manager [None req-70162dec-4389-4f64-9e7d-32358f404198 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. 
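The recurring "Expecting reply to msg ... in queue reply_..." / "Received RPC response" pairs are the amqpdriver's view of a blocking RPC call: the client sends a request, parks on a per-process reply queue, and unblocks when the response with the matching message id arrives. A minimal client-side sketch with a hypothetical target and method name (Nova's real RPC APIs live in its rpcapi modules):

import oslo_messaging as messaging
from oslo_config import cfg

transport = messaging.get_rpc_transport(cfg.CONF)             # e.g. rabbit:// from config
target = messaging.Target(topic='conductor', version='3.0')   # hypothetical target
client = messaging.get_rpc_client(transport, target)

def compute_node_get(ctxt, host):
    # call() blocks until the reply for this message id shows up on the
    # reply_<uuid> queue, which is the hand-off logged above.
    return client.call(ctxt, 'compute_node_get_by_host', host=host)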
[ 475.712751] env[61995]: DEBUG oslo_concurrency.lockutils [None req-70162dec-4389-4f64-9e7d-32358f404198 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 475.713189] env[61995]: DEBUG oslo_concurrency.lockutils [None req-70162dec-4389-4f64-9e7d-32358f404198 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 475.713285] env[61995]: DEBUG oslo_concurrency.lockutils [None req-70162dec-4389-4f64-9e7d-32358f404198 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 475.713438] env[61995]: DEBUG nova.compute.resource_tracker [None req-70162dec-4389-4f64-9e7d-32358f404198 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61995) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 475.714394] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99752e66-0c93-464f-9953-77462c51db15 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 475.722621] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac37faa5-c475-4433-bc8a-416805909459 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 475.737559] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d442cae4-f89a-4983-8596-4ad968e9a8bf {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 475.743433] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95aa29d5-08f0-40a1-91a9-d167f2bff98e {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 475.771850] env[61995]: DEBUG nova.compute.resource_tracker [None req-70162dec-4389-4f64-9e7d-32358f404198 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181744MB free_disk=139GB free_vcpus=48 pci_devices=None {{(pid=61995) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 475.771983] env[61995]: DEBUG oslo_concurrency.lockutils [None req-70162dec-4389-4f64-9e7d-32358f404198 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 475.772183] env[61995]: DEBUG oslo_concurrency.lockutils [None req-70162dec-4389-4f64-9e7d-32358f404198 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 475.772520] env[61995]: INFO 
oslo_messaging._drivers.amqpdriver [None req-70162dec-4389-4f64-9e7d-32358f404198 None None] Expecting reply to msg 5873e3572bc34d9d9c39830b686b479b in queue reply_757213bc08bb49dab178826d88b76f40 [ 475.783200] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5873e3572bc34d9d9c39830b686b479b [ 476.275001] env[61995]: WARNING nova.compute.resource_tracker [None req-70162dec-4389-4f64-9e7d-32358f404198 None None] No compute node record for cpu-1:c68e615f-c0c6-4278-bb7b-a579e68878a7: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host c68e615f-c0c6-4278-bb7b-a579e68878a7 could not be found. [ 476.276231] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-70162dec-4389-4f64-9e7d-32358f404198 None None] Expecting reply to msg 9e0068842f734a328b4273ba04c6f3ad in queue reply_757213bc08bb49dab178826d88b76f40 [ 476.287529] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9e0068842f734a328b4273ba04c6f3ad [ 476.779101] env[61995]: INFO nova.compute.resource_tracker [None req-70162dec-4389-4f64-9e7d-32358f404198 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: c68e615f-c0c6-4278-bb7b-a579e68878a7 [ 476.779563] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-70162dec-4389-4f64-9e7d-32358f404198 None None] Expecting reply to msg 4f9559ef0f0b48559dce6071112fa32a in queue reply_757213bc08bb49dab178826d88b76f40 [ 476.790043] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4f9559ef0f0b48559dce6071112fa32a [ 477.282684] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-70162dec-4389-4f64-9e7d-32358f404198 None None] Expecting reply to msg ab88cb4a9d3b44748b82b7bf5096e0e5 in queue reply_757213bc08bb49dab178826d88b76f40 [ 477.301710] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ab88cb4a9d3b44748b82b7bf5096e0e5 [ 477.784769] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-70162dec-4389-4f64-9e7d-32358f404198 None None] Expecting reply to msg 12505aadc03e4dfeb2137f296e0587db in queue reply_757213bc08bb49dab178826d88b76f40 [ 477.805893] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 12505aadc03e4dfeb2137f296e0587db [ 478.287087] env[61995]: DEBUG nova.compute.resource_tracker [None req-70162dec-4389-4f64-9e7d-32358f404198 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61995) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 478.287493] env[61995]: DEBUG nova.compute.resource_tracker [None req-70162dec-4389-4f64-9e7d-32358f404198 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61995) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 478.438267] env[61995]: INFO nova.scheduler.client.report [None req-70162dec-4389-4f64-9e7d-32358f404198 None None] [req-80e7993d-7c03-4714-a38f-423cd3e6f092] Created resource provider record via placement API for resource provider with UUID c68e615f-c0c6-4278-bb7b-a579e68878a7 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
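The resource provider created here is then populated with the inventory shown in the following entries. A small worked example of how a placement-style inventory record maps to schedulable capacity, capacity = (total - reserved) * allocation_ratio (with each allocation further bounded by max_unit), using the values reported for this node; the helper itself is illustrative:

def capacity(inv):
    return int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 139},
}

for rc, inv in inventory.items():
    print(rc, capacity(inv))   # VCPU 192, MEMORY_MB 196078, DISK_GB 400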
[ 478.455425] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4f83085-3721-46df-96a7-0968d5f69812 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 478.463167] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfdc8097-7fab-4343-a511-9f569cee11d6 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 478.493809] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a477415e-2026-4ea9-81f7-10db649e6e50 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 478.501189] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fda53e0-7ffd-46f0-bc11-b086c895acc4 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 478.514163] env[61995]: DEBUG nova.compute.provider_tree [None req-70162dec-4389-4f64-9e7d-32358f404198 None None] Updating inventory in ProviderTree for provider c68e615f-c0c6-4278-bb7b-a579e68878a7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61995) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 478.514782] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-70162dec-4389-4f64-9e7d-32358f404198 None None] Expecting reply to msg b07d946a98e14641bbb98a3a4c6eb914 in queue reply_757213bc08bb49dab178826d88b76f40 [ 478.521605] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b07d946a98e14641bbb98a3a4c6eb914 [ 479.046436] env[61995]: DEBUG nova.scheduler.client.report [None req-70162dec-4389-4f64-9e7d-32358f404198 None None] Updated inventory for provider c68e615f-c0c6-4278-bb7b-a579e68878a7 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61995) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 479.046636] env[61995]: DEBUG nova.compute.provider_tree [None req-70162dec-4389-4f64-9e7d-32358f404198 None None] Updating resource provider c68e615f-c0c6-4278-bb7b-a579e68878a7 generation from 0 to 1 during operation: update_inventory {{(pid=61995) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 479.046744] env[61995]: DEBUG nova.compute.provider_tree [None req-70162dec-4389-4f64-9e7d-32358f404198 None None] Updating inventory in ProviderTree for provider c68e615f-c0c6-4278-bb7b-a579e68878a7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 
1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61995) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 479.105818] env[61995]: DEBUG nova.compute.provider_tree [None req-70162dec-4389-4f64-9e7d-32358f404198 None None] Updating resource provider c68e615f-c0c6-4278-bb7b-a579e68878a7 generation from 1 to 2 during operation: update_traits {{(pid=61995) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 479.108330] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-70162dec-4389-4f64-9e7d-32358f404198 None None] Expecting reply to msg 769771544a064286a578fbb9c35f370a in queue reply_757213bc08bb49dab178826d88b76f40 [ 479.122443] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 769771544a064286a578fbb9c35f370a [ 479.610978] env[61995]: DEBUG nova.compute.resource_tracker [None req-70162dec-4389-4f64-9e7d-32358f404198 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61995) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 479.611330] env[61995]: DEBUG oslo_concurrency.lockutils [None req-70162dec-4389-4f64-9e7d-32358f404198 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.839s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 479.611366] env[61995]: DEBUG nova.service [None req-70162dec-4389-4f64-9e7d-32358f404198 None None] Creating RPC server for service compute {{(pid=61995) start /opt/stack/nova/nova/service.py:186}} [ 479.621408] env[61995]: INFO oslo.messaging._drivers.impl_rabbit [None req-70162dec-4389-4f64-9e7d-32358f404198 None None] Creating fanout queue: compute_fanout_e77ec50fd5df4df690d08fc18c622d68 [ 479.624228] env[61995]: DEBUG nova.service [None req-70162dec-4389-4f64-9e7d-32358f404198 None None] Join ServiceGroup membership for this service compute {{(pid=61995) start /opt/stack/nova/nova/service.py:203}} [ 479.624393] env[61995]: DEBUG nova.servicegroup.drivers.db [None req-70162dec-4389-4f64-9e7d-32358f404198 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=61995) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 484.626043] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 966a55fde1c340219eb79f5e12000f70 in queue reply_757213bc08bb49dab178826d88b76f40 [ 484.637706] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 966a55fde1c340219eb79f5e12000f70 [ 516.986829] env[61995]: DEBUG oslo_concurrency.lockutils [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Acquiring lock "1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 516.987098] env[61995]: DEBUG oslo_concurrency.lockutils [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Lock "1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 516.988140] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Expecting reply to msg 9354dd71150144eaaa0d18e0589f50c0 in queue reply_757213bc08bb49dab178826d88b76f40 [ 517.014488] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9354dd71150144eaaa0d18e0589f50c0 [ 517.490404] env[61995]: DEBUG nova.compute.manager [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Starting instance... {{(pid=61995) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 517.492215] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Expecting reply to msg 2dc366ef3c3c48c2b293a14f6379aba8 in queue reply_757213bc08bb49dab178826d88b76f40 [ 517.585400] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2dc366ef3c3c48c2b293a14f6379aba8 [ 517.626491] env[61995]: DEBUG oslo_service.periodic_task [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Running periodic task ComputeManager._sync_power_states {{(pid=61995) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 517.627160] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Expecting reply to msg 5bf95abd0035479dabff313d5c0f340b in queue reply_757213bc08bb49dab178826d88b76f40 [ 517.639818] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5bf95abd0035479dabff313d5c0f340b [ 518.027121] env[61995]: DEBUG oslo_concurrency.lockutils [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 518.027510] env[61995]: DEBUG oslo_concurrency.lockutils [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 518.029523] env[61995]: INFO nova.compute.claims [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 518.031147] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Expecting reply to msg 420c13030d184e2d842d9859f5e191d2 in queue 
reply_757213bc08bb49dab178826d88b76f40 [ 518.084084] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 420c13030d184e2d842d9859f5e191d2 [ 518.130524] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Getting list of instances from cluster (obj){ [ 518.130524] env[61995]: value = "domain-c8" [ 518.130524] env[61995]: _type = "ClusterComputeResource" [ 518.130524] env[61995]: } {{(pid=61995) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 518.130800] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76724e85-c641-4128-8e5a-8721e468cb1a {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.144613] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Got total of 0 instances {{(pid=61995) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 518.144954] env[61995]: DEBUG oslo_service.periodic_task [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=61995) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 518.145432] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Getting list of instances from cluster (obj){ [ 518.145432] env[61995]: value = "domain-c8" [ 518.145432] env[61995]: _type = "ClusterComputeResource" [ 518.145432] env[61995]: } {{(pid=61995) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 518.146602] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f60fefa3-09f4-4439-b095-86d55e51c5e9 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.157062] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Got total of 0 instances {{(pid=61995) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 518.534486] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Expecting reply to msg 2f26eec56f6042628680e49266ce7b37 in queue reply_757213bc08bb49dab178826d88b76f40 [ 518.547343] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2f26eec56f6042628680e49266ce7b37 [ 519.083881] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62a355ea-b604-42c9-b309-abb8fba2f7ab {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.092907] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fff1fe5b-4c66-4f60-a23b-281f1c83eb5b {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.145458] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c39479b-be90-400d-aa2e-71d55a233f37 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.153318] env[61995]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f7555d8-0327-42dc-b3e2-68e4363f9213 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.167139] env[61995]: DEBUG nova.compute.provider_tree [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Inventory has not changed in ProviderTree for provider: c68e615f-c0c6-4278-bb7b-a579e68878a7 {{(pid=61995) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 519.167520] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Expecting reply to msg 0f54f63750f14f17841ec76372ff9002 in queue reply_757213bc08bb49dab178826d88b76f40 [ 519.175334] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0f54f63750f14f17841ec76372ff9002 [ 519.495923] env[61995]: DEBUG oslo_concurrency.lockutils [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Acquiring lock "5527576a-d56d-42c3-a7f7-02c66c0d1b3d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 519.496209] env[61995]: DEBUG oslo_concurrency.lockutils [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Lock "5527576a-d56d-42c3-a7f7-02c66c0d1b3d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 519.496692] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Expecting reply to msg 0ee99186b3964704b32c1bc0f3c49aa6 in queue reply_757213bc08bb49dab178826d88b76f40 [ 519.518683] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0ee99186b3964704b32c1bc0f3c49aa6 [ 519.670792] env[61995]: DEBUG nova.scheduler.client.report [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Inventory has not changed for provider c68e615f-c0c6-4278-bb7b-a579e68878a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61995) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 519.673126] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Expecting reply to msg 8e69b757e8c04a66a51773c8c7a2375a in queue reply_757213bc08bb49dab178826d88b76f40 [ 519.686629] env[61995]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8e69b757e8c04a66a51773c8c7a2375a [ 519.999174] env[61995]: DEBUG nova.compute.manager [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] [instance: 5527576a-d56d-42c3-a7f7-02c66c0d1b3d] Starting instance... {{(pid=61995) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 520.001162] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Expecting reply to msg 2181c1278ac14bc3b2539aad92e252a9 in queue reply_757213bc08bb49dab178826d88b76f40 [ 520.043619] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2181c1278ac14bc3b2539aad92e252a9 [ 520.175041] env[61995]: DEBUG oslo_concurrency.lockutils [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.147s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 520.175616] env[61995]: DEBUG nova.compute.manager [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Start building networks asynchronously for instance. {{(pid=61995) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 520.177303] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Expecting reply to msg 41176823ab1a4398b342cf6c35b21c52 in queue reply_757213bc08bb49dab178826d88b76f40 [ 520.262040] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 41176823ab1a4398b342cf6c35b21c52 [ 520.523155] env[61995]: DEBUG oslo_concurrency.lockutils [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 520.523400] env[61995]: DEBUG oslo_concurrency.lockutils [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 520.524892] env[61995]: INFO nova.compute.claims [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] [instance: 5527576a-d56d-42c3-a7f7-02c66c0d1b3d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 520.526468] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] 
Expecting reply to msg 17930c26d042432499e8569699804732 in queue reply_757213bc08bb49dab178826d88b76f40 [ 520.563367] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 17930c26d042432499e8569699804732 [ 520.680974] env[61995]: DEBUG nova.compute.utils [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Using /dev/sd instead of None {{(pid=61995) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 520.681690] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Expecting reply to msg 8bb3491c983b479a938aad502c3fbf45 in queue reply_757213bc08bb49dab178826d88b76f40 [ 520.682647] env[61995]: DEBUG nova.compute.manager [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Not allocating networking since 'none' was specified. {{(pid=61995) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1986}} [ 520.695641] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8bb3491c983b479a938aad502c3fbf45 [ 521.030591] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Expecting reply to msg bcd5eba18ee74c7a929c5e6d3ef02d42 in queue reply_757213bc08bb49dab178826d88b76f40 [ 521.043897] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bcd5eba18ee74c7a929c5e6d3ef02d42 [ 521.184023] env[61995]: DEBUG nova.compute.manager [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Start building block device mappings for instance. 
{{(pid=61995) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 521.185788] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Expecting reply to msg f8cac0a97305435c996df56af711d650 in queue reply_757213bc08bb49dab178826d88b76f40 [ 521.233551] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f8cac0a97305435c996df56af711d650 [ 521.588050] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-021ff212-ceeb-4955-b7d0-a2b15fce48b9 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.596149] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5632e1a3-e583-4e44-b05d-c860785bf4cb {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.632600] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eaed3c3-e2b8-4483-b722-84cdfa66c4f5 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.640733] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-886ca537-0f1a-4cc7-8659-c634a04fd7b8 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.655772] env[61995]: DEBUG nova.compute.provider_tree [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Inventory has not changed in ProviderTree for provider: c68e615f-c0c6-4278-bb7b-a579e68878a7 {{(pid=61995) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 521.656297] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Expecting reply to msg 72faf5809031466f8af04291e8658926 in queue reply_757213bc08bb49dab178826d88b76f40 [ 521.663739] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 72faf5809031466f8af04291e8658926 [ 521.690072] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Expecting reply to msg 1dccbcaddee54d71888fe3d486350b1d in queue reply_757213bc08bb49dab178826d88b76f40 [ 521.744409] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1dccbcaddee54d71888fe3d486350b1d [ 522.042676] env[61995]: DEBUG oslo_concurrency.lockutils [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Acquiring lock "cd3d1cbb-be77-4fd0-9666-0d544a19a16d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 522.042912] env[61995]: DEBUG oslo_concurrency.lockutils [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 
tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Lock "cd3d1cbb-be77-4fd0-9666-0d544a19a16d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 522.043676] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Expecting reply to msg 2298d83b09204e3ab5efdc4a7ef0a6b6 in queue reply_757213bc08bb49dab178826d88b76f40 [ 522.053569] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2298d83b09204e3ab5efdc4a7ef0a6b6 [ 522.161965] env[61995]: DEBUG nova.scheduler.client.report [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Inventory has not changed for provider c68e615f-c0c6-4278-bb7b-a579e68878a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61995) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 522.164339] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Expecting reply to msg 1ed91a1e3dee42f1aaded3c2d2d78c91 in queue reply_757213bc08bb49dab178826d88b76f40 [ 522.178182] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1ed91a1e3dee42f1aaded3c2d2d78c91 [ 522.194135] env[61995]: DEBUG nova.compute.manager [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Start spawning the instance on the hypervisor. 
{{(pid=61995) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 522.353982] env[61995]: DEBUG nova.virt.hardware [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-06T07:57:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-06T07:57:11Z,direct_url=,disk_format='vmdk',id=947125f0-9664-40eb-953e-b1373b076c9f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3dc1e6434ef44fd0be01fd4ebf2b62ef',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-06T07:57:12Z,virtual_size=,visibility=), allow threads: False {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 522.354228] env[61995]: DEBUG nova.virt.hardware [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Flavor limits 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 522.354378] env[61995]: DEBUG nova.virt.hardware [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Image limits 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 522.354551] env[61995]: DEBUG nova.virt.hardware [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Flavor pref 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 522.354689] env[61995]: DEBUG nova.virt.hardware [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Image pref 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 522.354831] env[61995]: DEBUG nova.virt.hardware [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 522.355041] env[61995]: DEBUG nova.virt.hardware [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 522.355193] env[61995]: DEBUG nova.virt.hardware [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61995) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 522.355637] env[61995]: DEBUG nova.virt.hardware [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 
tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Got 1 possible topologies {{(pid=61995) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 522.355911] env[61995]: DEBUG nova.virt.hardware [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 522.355990] env[61995]: DEBUG nova.virt.hardware [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 522.356937] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76f4bcab-9cea-4fdc-aa85-710fa6a00c92 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.367700] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49ad1784-bf5a-427e-9b1a-9a0ad34b133e {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.392781] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd048244-fd20-499c-9da4-cc0a24fcec5f {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.411016] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Instance VIF info [] {{(pid=61995) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 522.419924] env[61995]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=61995) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 522.420256] env[61995]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4ba8e322-e0a3-464e-858c-9ba9d20e2b1d {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.440827] env[61995]: INFO nova.virt.vmwareapi.vm_util [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Created folder: OpenStack in parent group-v4. [ 522.441070] env[61995]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Creating folder: Project (f4a2b2f4372e4b7b909f8bf942719e2d). Parent ref: group-v103904. 
{{(pid=61995) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 522.441450] env[61995]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1c2ef814-9742-4d50-bfb3-2517bc3f2534 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.451337] env[61995]: INFO nova.virt.vmwareapi.vm_util [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Created folder: Project (f4a2b2f4372e4b7b909f8bf942719e2d) in parent group-v103904. [ 522.451527] env[61995]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Creating folder: Instances. Parent ref: group-v103905. {{(pid=61995) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 522.451759] env[61995]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-db4bcdc7-ce84-4e60-85d0-bb87256cef24 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.463161] env[61995]: INFO nova.virt.vmwareapi.vm_util [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Created folder: Instances in parent group-v103905. [ 522.463423] env[61995]: DEBUG oslo.service.loopingcall [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61995) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 522.463606] env[61995]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Creating VM on the ESX host {{(pid=61995) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 522.463799] env[61995]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4a6735c3-c44e-4422-b0e8-9ca6122cab3d {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.479506] env[61995]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 522.479506] env[61995]: value = "task-378062" [ 522.479506] env[61995]: _type = "Task" [ 522.479506] env[61995]: } to complete. {{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 522.489903] env[61995]: DEBUG oslo_vmware.api [-] Task: {'id': task-378062, 'name': CreateVM_Task} progress is 0%. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 522.545635] env[61995]: DEBUG nova.compute.manager [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] Starting instance... 
{{(pid=61995) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 522.547874] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Expecting reply to msg a0e2b1aff82044e8a10822ae080ee874 in queue reply_757213bc08bb49dab178826d88b76f40 [ 522.591009] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a0e2b1aff82044e8a10822ae080ee874 [ 522.668712] env[61995]: DEBUG oslo_concurrency.lockutils [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.145s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 522.669248] env[61995]: DEBUG nova.compute.manager [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] [instance: 5527576a-d56d-42c3-a7f7-02c66c0d1b3d] Start building networks asynchronously for instance. {{(pid=61995) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 522.671381] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Expecting reply to msg 32de612a697f41e19d35fa4727e76c90 in queue reply_757213bc08bb49dab178826d88b76f40 [ 522.716508] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 32de612a697f41e19d35fa4727e76c90 [ 522.991409] env[61995]: DEBUG oslo_vmware.api [-] Task: {'id': task-378062, 'name': CreateVM_Task, 'duration_secs': 0.276329} completed successfully. 
{{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 522.991558] env[61995]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Created VM on the ESX host {{(pid=61995) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 522.992666] env[61995]: DEBUG oslo_vmware.service [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f71a369-c6f8-4be1-a482-34edd0e7d884 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.000225] env[61995]: DEBUG oslo_concurrency.lockutils [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/947125f0-9664-40eb-953e-b1373b076c9f" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 523.000899] env[61995]: DEBUG oslo_concurrency.lockutils [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Acquired lock "[datastore1] devstack-image-cache_base/947125f0-9664-40eb-953e-b1373b076c9f" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 523.003061] env[61995]: DEBUG oslo_concurrency.lockutils [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/947125f0-9664-40eb-953e-b1373b076c9f" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 523.003061] env[61995]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-afe9fcb9-8de9-4bda-bad6-16fd5176b649 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.008104] env[61995]: DEBUG oslo_vmware.api [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Waiting for the task: (returnval){ [ 523.008104] env[61995]: value = "session[5258be39-d7e5-2e06-99ec-0419db7e1a3e]52be11ab-990a-1735-ea9b-e481b6f238c6" [ 523.008104] env[61995]: _type = "Task" [ 523.008104] env[61995]: } to complete. {{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 523.015629] env[61995]: DEBUG oslo_vmware.api [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': session[5258be39-d7e5-2e06-99ec-0419db7e1a3e]52be11ab-990a-1735-ea9b-e481b6f238c6, 'name': SearchDatastore_Task} progress is 0%. 
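CreateVM_Task above returns a Task moref immediately; the "progress is 0%" and "completed successfully" records are oslo.vmware polling that task. A sketch of the invoke-then-wait pattern, assuming session is the oslo_vmware.api.VMwareAPISession from the earlier folder sketch and that the folder, config spec and resource pool morefs are already in hand:

    def create_vm(session, folder_ref, config_spec, res_pool_ref):
        # Folder.CreateVM_Task only queues the work and hands back a Task moref.
        task = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                                  config=config_spec, pool=res_pool_ref)
        # wait_for_task polls the task (the "progress is N%" records above)
        # and raises if it ends in error; on success it returns the task info,
        # whose result here is the new VirtualMachine moref.
        task_info = session.wait_for_task(task)
        return task_info.result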
{{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 523.089501] env[61995]: DEBUG oslo_concurrency.lockutils [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 523.089805] env[61995]: DEBUG oslo_concurrency.lockutils [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 523.091336] env[61995]: INFO nova.compute.claims [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 523.092954] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Expecting reply to msg b603ed9264c04b1a9ee1f16adb2d7543 in queue reply_757213bc08bb49dab178826d88b76f40 [ 523.148422] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b603ed9264c04b1a9ee1f16adb2d7543 [ 523.174669] env[61995]: DEBUG nova.compute.utils [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Using /dev/sd instead of None {{(pid=61995) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 523.175305] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Expecting reply to msg 882483f2198b4abda86eef0587ef1c2e in queue reply_757213bc08bb49dab178826d88b76f40 [ 523.176208] env[61995]: DEBUG nova.compute.manager [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] [instance: 5527576a-d56d-42c3-a7f7-02c66c0d1b3d] Not allocating networking since 'none' was specified. 
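The "compute_resources" lock that ResourceTracker.instance_claim acquires and releases around each claim (held 2.145s and, later, 2.277s in this stretch) is an oslo.concurrency lock; both forms below emit debug lines like the ones in this log. The claim bodies are stand-ins, not the resource tracker's code.

    from oslo_concurrency import lockutils

    # Decorator form: every claim in this worker serializes on one lock name.
    @lockutils.synchronized('compute_resources')
    def instance_claim(instance, flavor):
        ...  # record the instance's CPU/RAM/disk usage against the node

    # Equivalent context-manager form.
    def abort_instance_claim(instance):
        with lockutils.lock('compute_resources'):
            ...  # drop the usage recorded for this instance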
{{(pid=61995) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1986}} [ 523.204108] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 882483f2198b4abda86eef0587ef1c2e [ 523.308388] env[61995]: DEBUG oslo_concurrency.lockutils [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Acquiring lock "4c0dd905-f751-4cb4-9be1-ef06518990f8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 523.308617] env[61995]: DEBUG oslo_concurrency.lockutils [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Lock "4c0dd905-f751-4cb4-9be1-ef06518990f8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 523.309079] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Expecting reply to msg 9cb85a9bea894cdab004a3202806e9e6 in queue reply_757213bc08bb49dab178826d88b76f40 [ 523.323366] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9cb85a9bea894cdab004a3202806e9e6 [ 523.391428] env[61995]: DEBUG oslo_concurrency.lockutils [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Acquiring lock "b610e51c-f7a4-4e3a-85c1-28603fc82bab" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 523.391694] env[61995]: DEBUG oslo_concurrency.lockutils [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Lock "b610e51c-f7a4-4e3a-85c1-28603fc82bab" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 523.392180] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Expecting reply to msg c5d07050e52c424b91ee02f6fd03dd0e in queue reply_757213bc08bb49dab178826d88b76f40 [ 523.401308] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c5d07050e52c424b91ee02f6fd03dd0e [ 523.523662] env[61995]: DEBUG oslo_concurrency.lockutils [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Releasing lock "[datastore1] devstack-image-cache_base/947125f0-9664-40eb-953e-b1373b076c9f" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 523.523662] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 
tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Processing image 947125f0-9664-40eb-953e-b1373b076c9f {{(pid=61995) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 523.523662] env[61995]: DEBUG oslo_concurrency.lockutils [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/947125f0-9664-40eb-953e-b1373b076c9f/947125f0-9664-40eb-953e-b1373b076c9f.vmdk" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 523.523662] env[61995]: DEBUG oslo_concurrency.lockutils [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Acquired lock "[datastore1] devstack-image-cache_base/947125f0-9664-40eb-953e-b1373b076c9f/947125f0-9664-40eb-953e-b1373b076c9f.vmdk" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 523.523842] env[61995]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61995) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 523.523842] env[61995]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2fe29cca-afc4-4949-af52-19567379f93a {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.527537] env[61995]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61995) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 523.527718] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61995) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 523.528517] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-142ab3ec-a657-404b-b162-187958e8b6c6 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.536912] env[61995]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18a911b6-ac52-4554-bdbc-11a04489c2ae {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.544427] env[61995]: DEBUG oslo_vmware.api [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Waiting for the task: (returnval){ [ 523.544427] env[61995]: value = "session[5258be39-d7e5-2e06-99ec-0419db7e1a3e]52a76de7-ed89-44ec-b9fa-ef1e5df8f76a" [ 523.544427] env[61995]: _type = "Task" [ 523.544427] env[61995]: } to complete. 
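Before the image is fetched, the driver ensures the devstack-image-cache_base directory exists on datastore1 via FileManager.MakeDirectory. A sketch of that call under the same session assumption; dc_ref stands for the Datacenter moref, and the fault noted in the comment is standard vSphere behaviour rather than anything specific to this log.

    def mkdir(session, ds_path, dc_ref):
        # FileManager.MakeDirectory creates the directory on the datastore;
        # createParentDirectories=True builds any missing intermediate dirs.
        # If the path already exists, vCenter raises a FileAlreadyExists
        # fault, which callers usually treat as success.
        file_manager = session.vim.service_content.fileManager
        session.invoke_api(session.vim, 'MakeDirectory', file_manager,
                           name=ds_path, datacenter=dc_ref,
                           createParentDirectories=True)

    # e.g. mkdir(session, '[datastore1] devstack-image-cache_base', dc_ref)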
{{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 523.554126] env[61995]: DEBUG oslo_vmware.api [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': session[5258be39-d7e5-2e06-99ec-0419db7e1a3e]52a76de7-ed89-44ec-b9fa-ef1e5df8f76a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 523.597146] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Expecting reply to msg eee138b2f3314fe298400f82546cd24c in queue reply_757213bc08bb49dab178826d88b76f40 [ 523.603775] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eee138b2f3314fe298400f82546cd24c [ 523.677145] env[61995]: DEBUG nova.compute.manager [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] [instance: 5527576a-d56d-42c3-a7f7-02c66c0d1b3d] Start building block device mappings for instance. {{(pid=61995) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 523.678987] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Expecting reply to msg 921a26d6add54160a5e18499fae9a930 in queue reply_757213bc08bb49dab178826d88b76f40 [ 523.721385] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 921a26d6add54160a5e18499fae9a930 [ 523.813402] env[61995]: DEBUG nova.compute.manager [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] Starting instance... {{(pid=61995) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 523.813402] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Expecting reply to msg be13d613ed3141c0b57161b535174571 in queue reply_757213bc08bb49dab178826d88b76f40 [ 523.860821] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg be13d613ed3141c0b57161b535174571 [ 523.897718] env[61995]: DEBUG nova.compute.manager [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] Starting instance... 
{{(pid=61995) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 523.897718] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Expecting reply to msg 126bc7e838414ff687f2e1b3c3d344f0 in queue reply_757213bc08bb49dab178826d88b76f40 [ 523.944346] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 126bc7e838414ff687f2e1b3c3d344f0 [ 524.055835] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Preparing fetch location {{(pid=61995) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 524.055835] env[61995]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Creating directory with path [datastore1] vmware_temp/fe1628e8-a46e-45d7-a82a-f0252aa48589/947125f0-9664-40eb-953e-b1373b076c9f {{(pid=61995) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 524.056246] env[61995]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-52894b22-c821-4651-99c3-87efcca4c021 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.111077] env[61995]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Created directory with path [datastore1] vmware_temp/fe1628e8-a46e-45d7-a82a-f0252aa48589/947125f0-9664-40eb-953e-b1373b076c9f {{(pid=61995) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 524.111077] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Fetch image to [datastore1] vmware_temp/fe1628e8-a46e-45d7-a82a-f0252aa48589/947125f0-9664-40eb-953e-b1373b076c9f/tmp-sparse.vmdk {{(pid=61995) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 524.111077] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Downloading image file data 947125f0-9664-40eb-953e-b1373b076c9f to [datastore1] vmware_temp/fe1628e8-a46e-45d7-a82a-f0252aa48589/947125f0-9664-40eb-953e-b1373b076c9f/tmp-sparse.vmdk on the data store datastore1 {{(pid=61995) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 524.111077] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b79cddd-2b77-4ab5-acba-d6d20e9d0955 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.117092] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4357ad63-c9a1-4ed3-840a-fbf67f4921dd {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.137911] env[61995]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22b520bb-ba90-477a-8847-200a8df00588 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.181800] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9624b7f4-95af-44cd-929d-81d007693cf6 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.187483] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Expecting reply to msg bd9628ff75a241b7a0aab77d42ba12c1 in queue reply_757213bc08bb49dab178826d88b76f40 [ 524.213296] env[61995]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ceae89c6-756d-4673-895e-cde8adfc7c3c {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.236205] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bd9628ff75a241b7a0aab77d42ba12c1 [ 524.236490] env[61995]: DEBUG nova.virt.vmwareapi.images [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Downloading image file data 947125f0-9664-40eb-953e-b1373b076c9f to the data store datastore1 {{(pid=61995) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 524.263110] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59a24670-ba8c-4490-8b33-005e4b8309d2 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.271163] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fef835c-3bd4-4271-910a-f98d06f6dff9 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.306004] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6251acb-f2d8-41cb-9826-38869c84ca8a {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.327373] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4a3ad73-e50a-4ccc-a512-c8119a21aa1d {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.350354] env[61995]: DEBUG nova.compute.provider_tree [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Inventory has not changed in ProviderTree for provider: c68e615f-c0c6-4278-bb7b-a579e68878a7 {{(pid=61995) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 524.351403] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Expecting reply to msg 863e810e771243c6ba6b433b8f65d466 in queue reply_757213bc08bb49dab178826d88b76f40 [ 524.363049] env[61995]: DEBUG 
oslo_concurrency.lockutils [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 524.363049] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 863e810e771243c6ba6b433b8f65d466 [ 524.414935] env[61995]: DEBUG oslo_concurrency.lockutils [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 524.449318] env[61995]: DEBUG oslo_vmware.rw_handles [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/fe1628e8-a46e-45d7-a82a-f0252aa48589/947125f0-9664-40eb-953e-b1373b076c9f/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61995) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 524.536581] env[61995]: DEBUG oslo_concurrency.lockutils [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Acquiring lock "2a07c11a-4e95-47db-bf50-5ad720403faa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 524.536581] env[61995]: DEBUG oslo_concurrency.lockutils [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Lock "2a07c11a-4e95-47db-bf50-5ad720403faa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 524.536909] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Expecting reply to msg b34023eb47c64f0aae882d13282446a7 in queue reply_757213bc08bb49dab178826d88b76f40 [ 524.552388] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b34023eb47c64f0aae882d13282446a7 [ 524.707059] env[61995]: DEBUG nova.compute.manager [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] [instance: 5527576a-d56d-42c3-a7f7-02c66c0d1b3d] Start spawning the instance on the hypervisor. 
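The 21318656-byte image is streamed to the datastore over plain HTTPS; the URL above is vSphere's /folder file-access endpoint. oslo_vmware.rw_handles wraps this in a write handle with progress tracking and transfer verification; the sketch below shows only the underlying idea using requests, and the host, paths and cookie handling are assumptions.

    import requests

    def upload_to_datastore(host, ds_name, rel_path, fileobj, cookies):
        # PUT https://<host>:443/folder/<path>?dcPath=<dc>&dsName=<ds> --
        # the same URL shape logged by oslo_vmware.rw_handles above.
        url = 'https://%s:443/folder/%s' % (host, rel_path)
        resp = requests.put(url,
                            params={'dcPath': 'ha-datacenter',
                                    'dsName': ds_name},
                            data=fileobj,      # streamed request body
                            cookies=cookies,   # authenticated vSphere session
                            verify=False)      # devstack/lab setting only
        resp.raise_for_status()
        return resp

    # e.g. with open('tmp-sparse.vmdk', 'rb') as f:
    #          upload_to_datastore('esx.example.test', 'datastore1',
    #                              'vmware_temp/.../tmp-sparse.vmdk', f, cookies)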
{{(pid=61995) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 524.736328] env[61995]: DEBUG nova.virt.hardware [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-06T07:57:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-06T07:57:11Z,direct_url=,disk_format='vmdk',id=947125f0-9664-40eb-953e-b1373b076c9f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3dc1e6434ef44fd0be01fd4ebf2b62ef',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-06T07:57:12Z,virtual_size=,visibility=), allow threads: False {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 524.738866] env[61995]: DEBUG nova.virt.hardware [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Flavor limits 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 524.739343] env[61995]: DEBUG nova.virt.hardware [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Image limits 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 524.739803] env[61995]: DEBUG nova.virt.hardware [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Flavor pref 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 524.740189] env[61995]: DEBUG nova.virt.hardware [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Image pref 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 524.740596] env[61995]: DEBUG nova.virt.hardware [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 524.741004] env[61995]: DEBUG nova.virt.hardware [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 524.741395] env[61995]: DEBUG nova.virt.hardware [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61995) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 524.741801] 
env[61995]: DEBUG nova.virt.hardware [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Got 1 possible topologies {{(pid=61995) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 524.742202] env[61995]: DEBUG nova.virt.hardware [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 524.742600] env[61995]: DEBUG nova.virt.hardware [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 524.744942] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a458be6-18d6-40a5-97d0-c3c8cfd7442b {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.763361] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbe3c149-09dd-4ba3-8d5f-8a478c599135 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.785066] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] [instance: 5527576a-d56d-42c3-a7f7-02c66c0d1b3d] Instance VIF info [] {{(pid=61995) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 524.792316] env[61995]: DEBUG nova.virt.vmwareapi.vm_util [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Creating folder: Project (a471df907f5b4b348ffef5581eb08bbd). Parent ref: group-v103904. {{(pid=61995) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 524.795847] env[61995]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1d60269d-0095-41cc-809c-764f52bc7166 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.815211] env[61995]: INFO nova.virt.vmwareapi.vm_util [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Created folder: Project (a471df907f5b4b348ffef5581eb08bbd) in parent group-v103904. [ 524.815516] env[61995]: DEBUG nova.virt.vmwareapi.vm_util [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Creating folder: Instances. Parent ref: group-v103908. 
{{(pid=61995) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 524.818780] env[61995]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-17e785bf-4196-4db4-813b-dce5168285a9 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.829645] env[61995]: INFO nova.virt.vmwareapi.vm_util [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Created folder: Instances in parent group-v103908. [ 524.830051] env[61995]: DEBUG oslo.service.loopingcall [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61995) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 524.830238] env[61995]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5527576a-d56d-42c3-a7f7-02c66c0d1b3d] Creating VM on the ESX host {{(pid=61995) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 524.830484] env[61995]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9ac7c288-2d69-486e-b9cb-16e0bbf56cca {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.856266] env[61995]: DEBUG nova.scheduler.client.report [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Inventory has not changed for provider c68e615f-c0c6-4278-bb7b-a579e68878a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61995) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 524.862235] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Expecting reply to msg 1dad97edee6041f7ac7a89b4d11046d2 in queue reply_757213bc08bb49dab178826d88b76f40 [ 524.879558] env[61995]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 524.879558] env[61995]: value = "task-378065" [ 524.879558] env[61995]: _type = "Task" [ 524.879558] env[61995]: } to complete. {{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 524.883174] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1dad97edee6041f7ac7a89b4d11046d2 [ 524.889173] env[61995]: DEBUG oslo_vmware.api [-] Task: {'id': task-378065, 'name': CreateVM_Task} progress is 6%. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 525.044522] env[61995]: DEBUG nova.compute.manager [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] Starting instance... 
{{(pid=61995) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 525.046534] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Expecting reply to msg b4e9c78c18c844d28cefd8c98c3ec475 in queue reply_757213bc08bb49dab178826d88b76f40 [ 525.094920] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4e9c78c18c844d28cefd8c98c3ec475 [ 525.158112] env[61995]: DEBUG oslo_vmware.rw_handles [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Completed reading data from the image iterator. {{(pid=61995) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 525.158631] env[61995]: DEBUG oslo_vmware.rw_handles [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/fe1628e8-a46e-45d7-a82a-f0252aa48589/947125f0-9664-40eb-953e-b1373b076c9f/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61995) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 525.355385] env[61995]: DEBUG nova.virt.vmwareapi.images [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Downloaded image file data 947125f0-9664-40eb-953e-b1373b076c9f to vmware_temp/fe1628e8-a46e-45d7-a82a-f0252aa48589/947125f0-9664-40eb-953e-b1373b076c9f/tmp-sparse.vmdk on the data store datastore1 {{(pid=61995) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 525.357096] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Caching image {{(pid=61995) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 525.357336] env[61995]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Copying Virtual Disk [datastore1] vmware_temp/fe1628e8-a46e-45d7-a82a-f0252aa48589/947125f0-9664-40eb-953e-b1373b076c9f/tmp-sparse.vmdk to [datastore1] vmware_temp/fe1628e8-a46e-45d7-a82a-f0252aa48589/947125f0-9664-40eb-953e-b1373b076c9f/947125f0-9664-40eb-953e-b1373b076c9f.vmdk {{(pid=61995) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 525.358058] env[61995]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5acfd47a-7efd-433c-885d-db9045a5a00f {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.365593] env[61995]: DEBUG oslo_vmware.api [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Waiting for the task: (returnval){ [ 525.365593] env[61995]: value = "task-378066" [ 525.365593] env[61995]: _type = "Task" [ 525.365593] env[61995]: } to complete. 
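Caching the image means copying the uploaded sparse vmdk to its long-lived path under devstack-image-cache_base, which is a VirtualDiskManager.CopyVirtualDisk_Task followed by the usual task polling (task-378066 above completes in roughly 0.7s). Sketched under the same session assumption:

    def copy_virtual_disk(session, dc_ref, source_ds_path, dest_ds_path):
        # CopyVirtualDisk_Task, like CreateVM_Task, returns a Task moref
        # that is then polled to completion.
        disk_manager = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task',
                                  disk_manager,
                                  sourceName=source_ds_path,
                                  sourceDatacenter=dc_ref,
                                  destName=dest_ds_path,
                                  destDatacenter=dc_ref)
        session.wait_for_task(task)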
{{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 525.366553] env[61995]: DEBUG oslo_concurrency.lockutils [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.277s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 525.367058] env[61995]: DEBUG nova.compute.manager [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] Start building networks asynchronously for instance. {{(pid=61995) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 525.368758] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Expecting reply to msg 3e1f68adda814c328ad0bb3c148c51b2 in queue reply_757213bc08bb49dab178826d88b76f40 [ 525.373415] env[61995]: DEBUG oslo_concurrency.lockutils [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.014s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 525.374862] env[61995]: INFO nova.compute.claims [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 525.376411] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Expecting reply to msg 864c404c65694d2cb995a5577fb194a1 in queue reply_757213bc08bb49dab178826d88b76f40 [ 525.382484] env[61995]: DEBUG oslo_vmware.api [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': task-378066, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 525.394887] env[61995]: DEBUG oslo_vmware.api [-] Task: {'id': task-378065, 'name': CreateVM_Task, 'duration_secs': 0.277328} completed successfully. 
{{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 525.395059] env[61995]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5527576a-d56d-42c3-a7f7-02c66c0d1b3d] Created VM on the ESX host {{(pid=61995) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 525.395472] env[61995]: DEBUG oslo_concurrency.lockutils [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/947125f0-9664-40eb-953e-b1373b076c9f" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 525.395624] env[61995]: DEBUG oslo_concurrency.lockutils [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Acquired lock "[datastore1] devstack-image-cache_base/947125f0-9664-40eb-953e-b1373b076c9f" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 525.395932] env[61995]: DEBUG oslo_concurrency.lockutils [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/947125f0-9664-40eb-953e-b1373b076c9f" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 525.396194] env[61995]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a62f424-3b39-4427-87cf-04216760a634 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.404153] env[61995]: DEBUG oslo_vmware.api [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Waiting for the task: (returnval){ [ 525.404153] env[61995]: value = "session[5258be39-d7e5-2e06-99ec-0419db7e1a3e]52690732-369b-3d22-2c9e-eff4afd2df04" [ 525.404153] env[61995]: _type = "Task" [ 525.404153] env[61995]: } to complete. {{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 525.417686] env[61995]: DEBUG oslo_vmware.api [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Task: {'id': session[5258be39-d7e5-2e06-99ec-0419db7e1a3e]52690732-369b-3d22-2c9e-eff4afd2df04, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 525.421814] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e1f68adda814c328ad0bb3c148c51b2 [ 525.441463] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 864c404c65694d2cb995a5577fb194a1 [ 525.570984] env[61995]: DEBUG oslo_concurrency.lockutils [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 525.880617] env[61995]: DEBUG nova.compute.utils [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Using /dev/sd instead of None {{(pid=61995) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 525.880617] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Expecting reply to msg a9ab4f494ccb43edae93168af504935d in queue reply_757213bc08bb49dab178826d88b76f40 [ 525.880617] env[61995]: DEBUG oslo_vmware.api [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': task-378066, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 525.880617] env[61995]: DEBUG nova.compute.manager [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] Allocating IP information in the background. 
{{(pid=61995) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1990}} [ 525.880617] env[61995]: DEBUG nova.network.neutron [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] allocate_for_instance() {{(pid=61995) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 525.887828] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Expecting reply to msg b1d364ac1ca548c3b67e8c2d5e37e41e in queue reply_757213bc08bb49dab178826d88b76f40 [ 525.892694] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a9ab4f494ccb43edae93168af504935d [ 525.899149] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b1d364ac1ca548c3b67e8c2d5e37e41e [ 525.913956] env[61995]: DEBUG oslo_concurrency.lockutils [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Releasing lock "[datastore1] devstack-image-cache_base/947125f0-9664-40eb-953e-b1373b076c9f" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 525.914293] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] [instance: 5527576a-d56d-42c3-a7f7-02c66c0d1b3d] Processing image 947125f0-9664-40eb-953e-b1373b076c9f {{(pid=61995) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 525.915145] env[61995]: DEBUG oslo_concurrency.lockutils [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/947125f0-9664-40eb-953e-b1373b076c9f/947125f0-9664-40eb-953e-b1373b076c9f.vmdk" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 526.205370] env[61995]: DEBUG nova.policy [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6a946e034d5341919c767b1caf3a2276', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fff64ab9fda740f3aa0d75c2e79dc8b6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61995) authorize /opt/stack/nova/nova/policy.py:203}} [ 526.377195] env[61995]: DEBUG oslo_vmware.api [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': task-378066, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.694305} completed successfully. 
{{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 526.377518] env[61995]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Copied Virtual Disk [datastore1] vmware_temp/fe1628e8-a46e-45d7-a82a-f0252aa48589/947125f0-9664-40eb-953e-b1373b076c9f/tmp-sparse.vmdk to [datastore1] vmware_temp/fe1628e8-a46e-45d7-a82a-f0252aa48589/947125f0-9664-40eb-953e-b1373b076c9f/947125f0-9664-40eb-953e-b1373b076c9f.vmdk {{(pid=61995) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 526.377747] env[61995]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Deleting the datastore file [datastore1] vmware_temp/fe1628e8-a46e-45d7-a82a-f0252aa48589/947125f0-9664-40eb-953e-b1373b076c9f/tmp-sparse.vmdk {{(pid=61995) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 526.378124] env[61995]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-380d802d-68b3-4020-ac19-2f9814ad8abd {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.385000] env[61995]: DEBUG oslo_vmware.api [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Waiting for the task: (returnval){ [ 526.385000] env[61995]: value = "task-378067" [ 526.385000] env[61995]: _type = "Task" [ 526.385000] env[61995]: } to complete. {{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 526.389484] env[61995]: DEBUG nova.compute.manager [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] Start building block device mappings for instance. {{(pid=61995) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 526.391802] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Expecting reply to msg 6218b05479b94ea1b8272302e6195b6b in queue reply_757213bc08bb49dab178826d88b76f40 [ 526.401358] env[61995]: DEBUG oslo_vmware.api [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': task-378067, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 526.440170] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6218b05479b94ea1b8272302e6195b6b [ 526.510305] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbc481fa-67d9-434a-9364-2633dfb675e5 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.517753] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac248042-e99a-45f6-9fb2-172cb642caad {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.551286] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa4d8a32-b3ba-40d4-a72e-801b04d446fb {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.558972] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6427b951-7f3d-421c-ad4e-fc92af99d241 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.573974] env[61995]: DEBUG nova.compute.provider_tree [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Inventory has not changed in ProviderTree for provider: c68e615f-c0c6-4278-bb7b-a579e68878a7 {{(pid=61995) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 526.574756] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Expecting reply to msg ffc7f6af58cc47499ca5bf03a5c75c2c in queue reply_757213bc08bb49dab178826d88b76f40 [ 526.585584] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ffc7f6af58cc47499ca5bf03a5c75c2c [ 526.894605] env[61995]: DEBUG oslo_vmware.api [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': task-378067, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.023409} completed successfully. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 526.895052] env[61995]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Deleted the datastore file {{(pid=61995) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 526.895357] env[61995]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Moving file from [datastore1] vmware_temp/fe1628e8-a46e-45d7-a82a-f0252aa48589/947125f0-9664-40eb-953e-b1373b076c9f to [datastore1] devstack-image-cache_base/947125f0-9664-40eb-953e-b1373b076c9f. 
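The final step of populating the cache is a FileManager.MoveDatastoreFile_Task that relocates the file from the vmware_temp upload location into devstack-image-cache_base (task-378068 below). The same invoke-and-wait pattern once more, sketched under the same session assumption:

    def move_datastore_file(session, dc_ref, source_ds_path, dest_ds_path):
        file_manager = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'MoveDatastoreFile_Task',
                                  file_manager,
                                  sourceName=source_ds_path,
                                  sourceDatacenter=dc_ref,
                                  destinationName=dest_ds_path,
                                  destinationDatacenter=dc_ref)
        session.wait_for_task(task)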
{{(pid=61995) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 526.895734] env[61995]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-024505fc-3089-4f8b-acd3-83df85927593 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.902363] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Expecting reply to msg becba6fc0f704a64a143946cea6ad125 in queue reply_757213bc08bb49dab178826d88b76f40 [ 526.908178] env[61995]: DEBUG oslo_vmware.api [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Waiting for the task: (returnval){ [ 526.908178] env[61995]: value = "task-378068" [ 526.908178] env[61995]: _type = "Task" [ 526.908178] env[61995]: } to complete. {{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 526.919576] env[61995]: DEBUG oslo_vmware.api [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': task-378068, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 526.941557] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg becba6fc0f704a64a143946cea6ad125 [ 527.082107] env[61995]: DEBUG nova.scheduler.client.report [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Inventory has not changed for provider c68e615f-c0c6-4278-bb7b-a579e68878a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61995) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 527.089208] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Expecting reply to msg 08056e4ee3b849689a2bae5894ad342e in queue reply_757213bc08bb49dab178826d88b76f40 [ 527.109602] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 08056e4ee3b849689a2bae5894ad342e [ 527.322775] env[61995]: DEBUG nova.network.neutron [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] Successfully created port: ef27cf19-df34-49d6-b558-2f4d11dee5a8 {{(pid=61995) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 527.405931] env[61995]: DEBUG nova.compute.manager [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] [instance: 
cd3d1cbb-be77-4fd0-9666-0d544a19a16d] Start spawning the instance on the hypervisor. {{(pid=61995) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 527.420195] env[61995]: DEBUG oslo_vmware.api [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': task-378068, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.0249} completed successfully. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 527.420610] env[61995]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] File moved {{(pid=61995) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 527.420848] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Cleaning up location [datastore1] vmware_temp/fe1628e8-a46e-45d7-a82a-f0252aa48589 {{(pid=61995) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 527.421061] env[61995]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Deleting the datastore file [datastore1] vmware_temp/fe1628e8-a46e-45d7-a82a-f0252aa48589 {{(pid=61995) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 527.421514] env[61995]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c831a867-ceae-4dd2-8e75-f1eef39b270a {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.438878] env[61995]: DEBUG oslo_vmware.api [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Waiting for the task: (returnval){ [ 527.438878] env[61995]: value = "task-378069" [ 527.438878] env[61995]: _type = "Task" [ 527.438878] env[61995]: } to complete. 
{{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 527.444868] env[61995]: DEBUG nova.virt.hardware [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-06T07:57:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-06T07:57:11Z,direct_url=,disk_format='vmdk',id=947125f0-9664-40eb-953e-b1373b076c9f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3dc1e6434ef44fd0be01fd4ebf2b62ef',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-06T07:57:12Z,virtual_size=,visibility=), allow threads: False {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 527.445129] env[61995]: DEBUG nova.virt.hardware [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Flavor limits 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 527.445293] env[61995]: DEBUG nova.virt.hardware [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Image limits 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 527.445489] env[61995]: DEBUG nova.virt.hardware [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Flavor pref 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 527.446104] env[61995]: DEBUG nova.virt.hardware [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Image pref 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 527.446104] env[61995]: DEBUG nova.virt.hardware [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 527.446262] env[61995]: DEBUG nova.virt.hardware [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 527.446443] env[61995]: DEBUG nova.virt.hardware [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca 
tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61995) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 527.446611] env[61995]: DEBUG nova.virt.hardware [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Got 1 possible topologies {{(pid=61995) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 527.446892] env[61995]: DEBUG nova.virt.hardware [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 527.447082] env[61995]: DEBUG nova.virt.hardware [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 527.447899] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27a34b76-82ea-4b02-aa66-106872779a54 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.457882] env[61995]: DEBUG oslo_vmware.api [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': task-378069, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 527.464574] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1922f9c-91be-42b5-998f-1a27973b4dcb {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.597447] env[61995]: DEBUG oslo_concurrency.lockutils [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.218s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 527.597447] env[61995]: DEBUG nova.compute.manager [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] Start building networks asynchronously for instance. 
{{(pid=61995) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 527.597447] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Expecting reply to msg 4e81d2fbb1d54a758891a684805e6bc4 in queue reply_757213bc08bb49dab178826d88b76f40 [ 527.597447] env[61995]: DEBUG oslo_concurrency.lockutils [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.181s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 527.597447] env[61995]: INFO nova.compute.claims [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 527.600230] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Expecting reply to msg aeed6964c70845dcb0780a5849e20e15 in queue reply_757213bc08bb49dab178826d88b76f40 [ 527.647399] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4e81d2fbb1d54a758891a684805e6bc4 [ 527.655751] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aeed6964c70845dcb0780a5849e20e15 [ 527.952708] env[61995]: DEBUG oslo_vmware.api [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': task-378069, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.02613} completed successfully. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 527.952708] env[61995]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Deleted the datastore file {{(pid=61995) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 527.952708] env[61995]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6f24204-7f4d-4a74-ab2c-00a5bcd44ded {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.959015] env[61995]: DEBUG oslo_vmware.api [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Waiting for the task: (returnval){ [ 527.959015] env[61995]: value = "session[5258be39-d7e5-2e06-99ec-0419db7e1a3e]52de32ea-738d-c754-56bb-6d6344414be6" [ 527.959015] env[61995]: _type = "Task" [ 527.959015] env[61995]: } to complete. {{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 527.967065] env[61995]: DEBUG oslo_vmware.api [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': session[5258be39-d7e5-2e06-99ec-0419db7e1a3e]52de32ea-738d-c754-56bb-6d6344414be6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 528.105343] env[61995]: DEBUG nova.compute.utils [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Using /dev/sd instead of None {{(pid=61995) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 528.106101] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Expecting reply to msg 6d7a9b30929d49088ceb6b04bf54608c in queue reply_757213bc08bb49dab178826d88b76f40 [ 528.108166] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Expecting reply to msg 22a1c4e783d341989369ff2e02aaf84b in queue reply_757213bc08bb49dab178826d88b76f40 [ 528.110090] env[61995]: DEBUG nova.compute.manager [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] Allocating IP information in the background. {{(pid=61995) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1990}} [ 528.110331] env[61995]: DEBUG nova.network.neutron [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] allocate_for_instance() {{(pid=61995) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 528.115754] env[61995]: DEBUG oslo_concurrency.lockutils [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Acquiring lock "cfffe8e0-4074-41c9-8e1b-49d621fc3c1b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 528.116244] env[61995]: DEBUG oslo_concurrency.lockutils [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Lock "cfffe8e0-4074-41c9-8e1b-49d621fc3c1b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 528.116755] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Expecting reply to msg 6378fccddfde480f8a3475bbb8c45a58 in queue reply_757213bc08bb49dab178826d88b76f40 [ 528.123357] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 22a1c4e783d341989369ff2e02aaf84b [ 528.123836] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6d7a9b30929d49088ceb6b04bf54608c [ 528.134802] env[61995]: DEBUG oslo_service.periodic_task [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61995) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 528.135337] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6378fccddfde480f8a3475bbb8c45a58 [ 528.135653] env[61995]: DEBUG oslo_service.periodic_task [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61995) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 528.135850] env[61995]: DEBUG nova.compute.manager [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Starting heal instance info cache {{(pid=61995) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 528.135964] env[61995]: DEBUG nova.compute.manager [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Rebuilding the list of instances to heal {{(pid=61995) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 528.136541] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Expecting reply to msg b6a6d062570a4c969b20c4b1ac1a635f in queue reply_757213bc08bb49dab178826d88b76f40 [ 528.150645] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b6a6d062570a4c969b20c4b1ac1a635f [ 528.305798] env[61995]: DEBUG nova.policy [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9826267f005e450aa6c4ed04e69cb40d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0d02b64932a142fbb75df5704932bdeb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61995) authorize /opt/stack/nova/nova/policy.py:203}} [ 528.469651] env[61995]: DEBUG oslo_vmware.api [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': session[5258be39-d7e5-2e06-99ec-0419db7e1a3e]52de32ea-738d-c754-56bb-6d6344414be6, 'name': SearchDatastore_Task, 'duration_secs': 0.011201} completed successfully. 
{{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 528.470036] env[61995]: DEBUG oslo_concurrency.lockutils [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Releasing lock "[datastore1] devstack-image-cache_base/947125f0-9664-40eb-953e-b1373b076c9f/947125f0-9664-40eb-953e-b1373b076c9f.vmdk" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 528.470317] env[61995]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/947125f0-9664-40eb-953e-b1373b076c9f/947125f0-9664-40eb-953e-b1373b076c9f.vmdk to [datastore1] 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62/1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62.vmdk {{(pid=61995) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 528.470634] env[61995]: DEBUG oslo_concurrency.lockutils [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Acquired lock "[datastore1] devstack-image-cache_base/947125f0-9664-40eb-953e-b1373b076c9f/947125f0-9664-40eb-953e-b1373b076c9f.vmdk" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 528.470875] env[61995]: DEBUG nova.virt.vmwareapi.ds_util [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61995) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 528.471284] env[61995]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f9416f34-66da-4aa9-9975-c4c79f1e332a {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.473485] env[61995]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aaec6d4d-8f7f-4feb-993b-2a757e0d87ad {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.483753] env[61995]: DEBUG oslo_vmware.api [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Waiting for the task: (returnval){ [ 528.483753] env[61995]: value = "task-378070" [ 528.483753] env[61995]: _type = "Task" [ 528.483753] env[61995]: } to complete. {{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 528.483753] env[61995]: DEBUG nova.virt.vmwareapi.ds_util [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61995) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 528.483753] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61995) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 528.485907] env[61995]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-628b6493-f72c-4c73-81b8-7c68c5ff18bb {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.490072] env[61995]: DEBUG oslo_vmware.api [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': task-378070, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 528.493417] env[61995]: DEBUG oslo_vmware.api [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Waiting for the task: (returnval){ [ 528.493417] env[61995]: value = "session[5258be39-d7e5-2e06-99ec-0419db7e1a3e]52d903dd-b26e-39d5-0805-e42057399d51" [ 528.493417] env[61995]: _type = "Task" [ 528.493417] env[61995]: } to complete. {{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 528.500928] env[61995]: DEBUG oslo_vmware.api [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Task: {'id': session[5258be39-d7e5-2e06-99ec-0419db7e1a3e]52d903dd-b26e-39d5-0805-e42057399d51, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 528.613731] env[61995]: DEBUG nova.compute.manager [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] Start building block device mappings for instance. {{(pid=61995) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 528.613731] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Expecting reply to msg 3bfca97de05e4f32abac2fbd8d6767bc in queue reply_757213bc08bb49dab178826d88b76f40 [ 528.619022] env[61995]: DEBUG nova.compute.manager [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] Starting instance... {{(pid=61995) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 528.621546] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Expecting reply to msg 58d6cff82f614ba29915f463cd16b505 in queue reply_757213bc08bb49dab178826d88b76f40 [ 528.640529] env[61995]: DEBUG nova.compute.manager [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Skipping network cache update for instance because it is Building. 
{{(pid=61995) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9954}} [ 528.640840] env[61995]: DEBUG nova.compute.manager [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] [instance: 5527576a-d56d-42c3-a7f7-02c66c0d1b3d] Skipping network cache update for instance because it is Building. {{(pid=61995) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9954}} [ 528.641082] env[61995]: DEBUG nova.compute.manager [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] Skipping network cache update for instance because it is Building. {{(pid=61995) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9954}} [ 528.641296] env[61995]: DEBUG nova.compute.manager [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] Skipping network cache update for instance because it is Building. {{(pid=61995) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9954}} [ 528.642073] env[61995]: DEBUG nova.compute.manager [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] Skipping network cache update for instance because it is Building. {{(pid=61995) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9954}} [ 528.642380] env[61995]: DEBUG nova.compute.manager [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Didn't find any instances for network info cache update. {{(pid=61995) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10027}} [ 528.647052] env[61995]: DEBUG oslo_service.periodic_task [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61995) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 528.647696] env[61995]: DEBUG oslo_service.periodic_task [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61995) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 528.649779] env[61995]: DEBUG oslo_service.periodic_task [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61995) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 528.650204] env[61995]: DEBUG oslo_service.periodic_task [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61995) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 528.650620] env[61995]: DEBUG oslo_service.periodic_task [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61995) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 528.650968] env[61995]: DEBUG oslo_service.periodic_task [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61995) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 528.651241] env[61995]: DEBUG nova.compute.manager [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61995) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10560}} [ 528.651485] env[61995]: DEBUG oslo_service.periodic_task [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61995) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 528.651950] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Expecting reply to msg 5ddac5d3981843e9896629a1405d5901 in queue reply_757213bc08bb49dab178826d88b76f40 [ 528.676304] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ddac5d3981843e9896629a1405d5901 [ 528.677135] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3bfca97de05e4f32abac2fbd8d6767bc [ 528.677747] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 58d6cff82f614ba29915f463cd16b505 [ 528.764462] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da2d99da-9554-4a78-9e8a-3af6ed11dd6e {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.773326] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d6ffef0-e707-4110-9a77-d2cd7583b742 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.807369] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d44a243-7cec-45b3-ae7f-29d438fddbab {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.815628] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-734dec3b-269c-4bb1-9afa-6f187bf45775 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.834402] env[61995]: DEBUG nova.compute.provider_tree [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Inventory has not changed in ProviderTree for provider: c68e615f-c0c6-4278-bb7b-a579e68878a7 {{(pid=61995) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 528.835123] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Expecting reply to msg 3f01b22eea3148ca82d5070553ba44f2 in queue reply_757213bc08bb49dab178826d88b76f40 [ 528.844283] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3f01b22eea3148ca82d5070553ba44f2 [ 528.863895] env[61995]: DEBUG nova.network.neutron [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] Successfully created port: b996cfab-151e-438f-929d-3393ad4e64f7 {{(pid=61995) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 528.989598] env[61995]: DEBUG oslo_vmware.api [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': task-378070, 'name': 
CopyVirtualDisk_Task} progress is 89%. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 529.003115] env[61995]: DEBUG oslo_vmware.api [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Task: {'id': session[5258be39-d7e5-2e06-99ec-0419db7e1a3e]52d903dd-b26e-39d5-0805-e42057399d51, 'name': SearchDatastore_Task, 'duration_secs': 0.007392} completed successfully. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 529.004453] env[61995]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77f581fb-2bd2-489f-a4d9-dc32c346e9e5 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.010388] env[61995]: DEBUG oslo_vmware.api [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Waiting for the task: (returnval){ [ 529.010388] env[61995]: value = "session[5258be39-d7e5-2e06-99ec-0419db7e1a3e]52369bbc-0226-d420-595a-115dd357cff0" [ 529.010388] env[61995]: _type = "Task" [ 529.010388] env[61995]: } to complete. {{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 529.019562] env[61995]: DEBUG oslo_vmware.api [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Task: {'id': session[5258be39-d7e5-2e06-99ec-0419db7e1a3e]52369bbc-0226-d420-595a-115dd357cff0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 529.123246] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Expecting reply to msg 390259b1e5ba465e951032acc366b12d in queue reply_757213bc08bb49dab178826d88b76f40 [ 529.156694] env[61995]: DEBUG oslo_concurrency.lockutils [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 529.157747] env[61995]: DEBUG oslo_concurrency.lockutils [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 529.173056] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 390259b1e5ba465e951032acc366b12d [ 529.338161] env[61995]: DEBUG nova.scheduler.client.report [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Inventory has not changed for provider c68e615f-c0c6-4278-bb7b-a579e68878a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61995) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 529.341117] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Expecting reply to msg 4fa77924dfc343039170d33a319f684e in queue reply_757213bc08bb49dab178826d88b76f40 [ 529.354417] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4fa77924dfc343039170d33a319f684e [ 529.490587] env[61995]: DEBUG oslo_vmware.api [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': task-378070, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.539958} completed successfully. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 529.490881] env[61995]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/947125f0-9664-40eb-953e-b1373b076c9f/947125f0-9664-40eb-953e-b1373b076c9f.vmdk to [datastore1] 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62/1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62.vmdk {{(pid=61995) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 529.491133] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Extending root virtual disk to 1048576 {{(pid=61995) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 529.491394] env[61995]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d1c14c42-4ce8-47e5-84ee-07574e6ee833 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.499613] env[61995]: DEBUG oslo_vmware.api [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Waiting for the task: (returnval){ [ 529.499613] env[61995]: value = "task-378071" [ 529.499613] env[61995]: _type = "Task" [ 529.499613] env[61995]: } to complete. {{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 529.510539] env[61995]: DEBUG oslo_vmware.api [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': task-378071, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 529.519156] env[61995]: DEBUG oslo_vmware.api [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Task: {'id': session[5258be39-d7e5-2e06-99ec-0419db7e1a3e]52369bbc-0226-d420-595a-115dd357cff0, 'name': SearchDatastore_Task, 'duration_secs': 0.01754} completed successfully. 
{{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 529.519536] env[61995]: DEBUG oslo_concurrency.lockutils [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Releasing lock "[datastore1] devstack-image-cache_base/947125f0-9664-40eb-953e-b1373b076c9f/947125f0-9664-40eb-953e-b1373b076c9f.vmdk" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 529.519720] env[61995]: DEBUG nova.virt.vmwareapi.vm_util [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/947125f0-9664-40eb-953e-b1373b076c9f/947125f0-9664-40eb-953e-b1373b076c9f.vmdk to [datastore1] 5527576a-d56d-42c3-a7f7-02c66c0d1b3d/5527576a-d56d-42c3-a7f7-02c66c0d1b3d.vmdk {{(pid=61995) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 529.519990] env[61995]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-42bea5e5-34d1-4960-b135-f70e01c98cc9 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.526042] env[61995]: DEBUG oslo_vmware.api [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Waiting for the task: (returnval){ [ 529.526042] env[61995]: value = "task-378072" [ 529.526042] env[61995]: _type = "Task" [ 529.526042] env[61995]: } to complete. {{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 529.535477] env[61995]: DEBUG oslo_vmware.api [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Task: {'id': task-378072, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 529.627579] env[61995]: DEBUG nova.compute.manager [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] Start spawning the instance on the hypervisor. 
{{(pid=61995) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 529.654517] env[61995]: DEBUG nova.virt.hardware [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-06T07:57:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-06T07:57:11Z,direct_url=,disk_format='vmdk',id=947125f0-9664-40eb-953e-b1373b076c9f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3dc1e6434ef44fd0be01fd4ebf2b62ef',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-06T07:57:12Z,virtual_size=,visibility=), allow threads: False {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 529.654732] env[61995]: DEBUG nova.virt.hardware [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Flavor limits 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 529.654899] env[61995]: DEBUG nova.virt.hardware [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Image limits 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 529.655081] env[61995]: DEBUG nova.virt.hardware [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Flavor pref 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 529.655219] env[61995]: DEBUG nova.virt.hardware [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Image pref 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 529.655367] env[61995]: DEBUG nova.virt.hardware [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 529.655585] env[61995]: DEBUG nova.virt.hardware [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 529.655757] env[61995]: DEBUG nova.virt.hardware [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61995) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 529.655910] env[61995]: DEBUG 
nova.virt.hardware [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Got 1 possible topologies {{(pid=61995) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 529.656144] env[61995]: DEBUG nova.virt.hardware [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 529.656296] env[61995]: DEBUG nova.virt.hardware [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 529.657201] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bb92554-7302-4d5a-ad75-12098157092b {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.666403] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-560b80a2-2dea-4078-91a1-8f1f424a4910 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.844096] env[61995]: DEBUG oslo_concurrency.lockutils [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.248s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 529.844640] env[61995]: DEBUG nova.compute.manager [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] Start building networks asynchronously for instance. 
{{(pid=61995) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 529.846391] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Expecting reply to msg bdbbaa5fc1c144eeafb1f45d561c1a52 in queue reply_757213bc08bb49dab178826d88b76f40 [ 529.847431] env[61995]: DEBUG oslo_concurrency.lockutils [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.277s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 529.851373] env[61995]: INFO nova.compute.claims [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 529.853061] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Expecting reply to msg 2f8d07175fe047dfa7e43d3ff4698f73 in queue reply_757213bc08bb49dab178826d88b76f40 [ 529.882431] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bdbbaa5fc1c144eeafb1f45d561c1a52 [ 529.904820] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2f8d07175fe047dfa7e43d3ff4698f73 [ 530.011982] env[61995]: DEBUG oslo_vmware.api [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': task-378071, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061936} completed successfully. 
{{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 530.012600] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Extended root virtual disk {{(pid=61995) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 530.013454] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e67cac9-a496-4256-9ecf-d0e55a777ee6 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.038044] env[61995]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Reconfiguring VM instance instance-00000001 to attach disk [datastore1] 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62/1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62.vmdk or device None with type sparse {{(pid=61995) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 530.041526] env[61995]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6bd5a5f9-bf12-4bac-aea4-f1bd888cd107 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.066860] env[61995]: DEBUG oslo_vmware.api [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Task: {'id': task-378072, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.490312} completed successfully. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 530.069134] env[61995]: DEBUG nova.virt.vmwareapi.vm_util [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/947125f0-9664-40eb-953e-b1373b076c9f/947125f0-9664-40eb-953e-b1373b076c9f.vmdk to [datastore1] 5527576a-d56d-42c3-a7f7-02c66c0d1b3d/5527576a-d56d-42c3-a7f7-02c66c0d1b3d.vmdk {{(pid=61995) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 530.069484] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] [instance: 5527576a-d56d-42c3-a7f7-02c66c0d1b3d] Extending root virtual disk to 1048576 {{(pid=61995) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 530.069641] env[61995]: DEBUG oslo_vmware.api [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Waiting for the task: (returnval){ [ 530.069641] env[61995]: value = "task-378073" [ 530.069641] env[61995]: _type = "Task" [ 530.069641] env[61995]: } to complete. 
{{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 530.070128] env[61995]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a3fd39bb-b402-45de-b477-313b5d170351 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.080848] env[61995]: DEBUG oslo_vmware.api [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': task-378073, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 530.082431] env[61995]: DEBUG oslo_vmware.api [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Waiting for the task: (returnval){ [ 530.082431] env[61995]: value = "task-378074" [ 530.082431] env[61995]: _type = "Task" [ 530.082431] env[61995]: } to complete. {{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 530.090314] env[61995]: DEBUG oslo_vmware.api [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Task: {'id': task-378074, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 530.252517] env[61995]: ERROR nova.compute.manager [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ef27cf19-df34-49d6-b558-2f4d11dee5a8, please check neutron logs for more information. 
[ 530.252517] env[61995]: ERROR nova.compute.manager Traceback (most recent call last): [ 530.252517] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1998, in _allocate_network_async [ 530.252517] env[61995]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 530.252517] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 530.252517] env[61995]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 530.252517] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 530.252517] env[61995]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 530.252517] env[61995]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 530.252517] env[61995]: ERROR nova.compute.manager self.force_reraise() [ 530.252517] env[61995]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 530.252517] env[61995]: ERROR nova.compute.manager raise self.value [ 530.252517] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 530.252517] env[61995]: ERROR nova.compute.manager updated_port = self._update_port( [ 530.252517] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 530.252517] env[61995]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 530.252947] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 530.252947] env[61995]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 530.252947] env[61995]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port ef27cf19-df34-49d6-b558-2f4d11dee5a8, please check neutron logs for more information. 
[ 530.252947] env[61995]: ERROR nova.compute.manager [ 530.252947] env[61995]: Traceback (most recent call last): [ 530.252947] env[61995]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 530.252947] env[61995]: listener.cb(fileno) [ 530.252947] env[61995]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 530.252947] env[61995]: result = function(*args, **kwargs) [ 530.252947] env[61995]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 530.252947] env[61995]: return func(*args, **kwargs) [ 530.252947] env[61995]: File "/opt/stack/nova/nova/compute/manager.py", line 2020, in _allocate_network_async [ 530.252947] env[61995]: raise e [ 530.252947] env[61995]: File "/opt/stack/nova/nova/compute/manager.py", line 1998, in _allocate_network_async [ 530.252947] env[61995]: nwinfo = self.network_api.allocate_for_instance( [ 530.252947] env[61995]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 530.252947] env[61995]: created_port_ids = self._update_ports_for_instance( [ 530.252947] env[61995]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 530.252947] env[61995]: with excutils.save_and_reraise_exception(): [ 530.252947] env[61995]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 530.252947] env[61995]: self.force_reraise() [ 530.252947] env[61995]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 530.252947] env[61995]: raise self.value [ 530.252947] env[61995]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 530.252947] env[61995]: updated_port = self._update_port( [ 530.252947] env[61995]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 530.252947] env[61995]: _ensure_no_port_binding_failure(port) [ 530.252947] env[61995]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 530.252947] env[61995]: raise exception.PortBindingFailed(port_id=port['id']) [ 530.253717] env[61995]: nova.exception.PortBindingFailed: Binding failed for port ef27cf19-df34-49d6-b558-2f4d11dee5a8, please check neutron logs for more information. [ 530.253717] env[61995]: Removing descriptor: 15 [ 530.254473] env[61995]: ERROR nova.compute.manager [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ef27cf19-df34-49d6-b558-2f4d11dee5a8, please check neutron logs for more information. 
[ 530.254473] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] Traceback (most recent call last): [ 530.254473] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] File "/opt/stack/nova/nova/compute/manager.py", line 2903, in _build_resources [ 530.254473] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] yield resources [ 530.254473] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] File "/opt/stack/nova/nova/compute/manager.py", line 2650, in _build_and_run_instance [ 530.254473] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] self.driver.spawn(context, instance, image_meta, [ 530.254473] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 530.254473] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 530.254473] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 530.254473] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] vm_ref = self.build_virtual_machine(instance, [ 530.254473] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 530.254805] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] vif_infos = vmwarevif.get_vif_info(self._session, [ 530.254805] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 530.254805] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] for vif in network_info: [ 530.254805] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 530.254805] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] return self._sync_wrapper(fn, *args, **kwargs) [ 530.254805] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 530.254805] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] self.wait() [ 530.254805] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 530.254805] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] self[:] = self._gt.wait() [ 530.254805] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 530.254805] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] return self._exit_event.wait() [ 530.254805] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 530.254805] env[61995]: ERROR 
nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] result = hub.switch() [ 530.255155] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 530.255155] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] return self.greenlet.switch() [ 530.255155] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 530.255155] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] result = function(*args, **kwargs) [ 530.255155] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 530.255155] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] return func(*args, **kwargs) [ 530.255155] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] File "/opt/stack/nova/nova/compute/manager.py", line 2020, in _allocate_network_async [ 530.255155] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] raise e [ 530.255155] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] File "/opt/stack/nova/nova/compute/manager.py", line 1998, in _allocate_network_async [ 530.255155] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] nwinfo = self.network_api.allocate_for_instance( [ 530.255155] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 530.255155] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] created_port_ids = self._update_ports_for_instance( [ 530.255155] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 530.255496] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] with excutils.save_and_reraise_exception(): [ 530.255496] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 530.255496] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] self.force_reraise() [ 530.255496] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 530.255496] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] raise self.value [ 530.255496] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 530.255496] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] updated_port = self._update_port( [ 530.255496] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 530.255496] 
env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] _ensure_no_port_binding_failure(port) [ 530.255496] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 530.255496] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] raise exception.PortBindingFailed(port_id=port['id']) [ 530.255496] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] nova.exception.PortBindingFailed: Binding failed for port ef27cf19-df34-49d6-b558-2f4d11dee5a8, please check neutron logs for more information. [ 530.255496] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] [ 530.255866] env[61995]: INFO nova.compute.manager [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] Terminating instance [ 530.260405] env[61995]: DEBUG oslo_concurrency.lockutils [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Acquiring lock "refresh_cache-cd3d1cbb-be77-4fd0-9666-0d544a19a16d" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 530.260576] env[61995]: DEBUG oslo_concurrency.lockutils [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Acquired lock "refresh_cache-cd3d1cbb-be77-4fd0-9666-0d544a19a16d" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 530.260739] env[61995]: DEBUG nova.network.neutron [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] Building network info cache for instance {{(pid=61995) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 530.261194] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Expecting reply to msg 6328a3b28b514f1c97ab39683214ee3c in queue reply_757213bc08bb49dab178826d88b76f40 [ 530.270052] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6328a3b28b514f1c97ab39683214ee3c [ 530.356263] env[61995]: DEBUG nova.compute.utils [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Using /dev/sd instead of None {{(pid=61995) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 530.356933] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Expecting reply to msg 5e0ae83845044f55b37c6a156dfaf2de in queue reply_757213bc08bb49dab178826d88b76f40 [ 530.357855] env[61995]: DEBUG nova.compute.manager [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 
tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] Allocating IP information in the background. {{(pid=61995) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1990}} [ 530.358008] env[61995]: DEBUG nova.network.neutron [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] allocate_for_instance() {{(pid=61995) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 530.364926] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Expecting reply to msg 6022aa8f2fcd449bb74946a845802db3 in queue reply_757213bc08bb49dab178826d88b76f40 [ 530.369983] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5e0ae83845044f55b37c6a156dfaf2de [ 530.373510] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6022aa8f2fcd449bb74946a845802db3 [ 530.493292] env[61995]: DEBUG nova.policy [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8a06576c8c42481181150c5f2af7abeb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5da9250ce8864797964bc462fe88987a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61995) authorize /opt/stack/nova/nova/policy.py:203}} [ 530.582845] env[61995]: DEBUG oslo_vmware.api [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': task-378073, 'name': ReconfigVM_Task, 'duration_secs': 0.284734} completed successfully. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 530.583409] env[61995]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Reconfigured VM instance instance-00000001 to attach disk [datastore1] 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62/1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62.vmdk or device None with type sparse {{(pid=61995) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 530.586967] env[61995]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6ed33aa0-08e8-4873-812f-088064d4d83a {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.595145] env[61995]: DEBUG oslo_vmware.api [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Task: {'id': task-378074, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.096961} completed successfully. 
{{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 530.596590] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] [instance: 5527576a-d56d-42c3-a7f7-02c66c0d1b3d] Extended root virtual disk {{(pid=61995) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 530.597046] env[61995]: DEBUG oslo_vmware.api [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Waiting for the task: (returnval){ [ 530.597046] env[61995]: value = "task-378075" [ 530.597046] env[61995]: _type = "Task" [ 530.597046] env[61995]: } to complete. {{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 530.597988] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37dbb34c-f604-4e60-9d2c-191ebe35671e {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.616899] env[61995]: DEBUG oslo_vmware.api [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': task-378075, 'name': Rename_Task} progress is 6%. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 530.627714] env[61995]: DEBUG nova.virt.vmwareapi.volumeops [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] [instance: 5527576a-d56d-42c3-a7f7-02c66c0d1b3d] Reconfiguring VM instance instance-00000002 to attach disk [datastore1] 5527576a-d56d-42c3-a7f7-02c66c0d1b3d/5527576a-d56d-42c3-a7f7-02c66c0d1b3d.vmdk or device None with type sparse {{(pid=61995) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 530.628267] env[61995]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e8792eac-95a1-427c-a173-38ed89ca9f8e {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.649329] env[61995]: DEBUG oslo_vmware.api [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Waiting for the task: (returnval){ [ 530.649329] env[61995]: value = "task-378076" [ 530.649329] env[61995]: _type = "Task" [ 530.649329] env[61995]: } to complete. {{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 530.659554] env[61995]: DEBUG oslo_vmware.api [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Task: {'id': task-378076, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 530.853511] env[61995]: DEBUG nova.network.neutron [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] Instance cache missing network info. 
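The PortBindingFailed traceback above bottoms out in nova.network.neutron._ensure_no_port_binding_failure, and the "Instance cache missing network info" / empty network_info entries for instance cd3d1cbb-be77-4fd0-9666-0d544a19a16d are the cleanup that follows. As a rough sketch of what that check amounts to (illustrative only, not the Nova source; the port dict shape is assumed to follow Neutron's port API response): Neutron marks a port it could not bind with binding:vif_type='binding_failed', and the spawn treats that as fatal.

    class PortBindingFailed(Exception):
        # Hypothetical stand-in for nova.exception.PortBindingFailed.
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)

    def ensure_no_port_binding_failure(port):
        # A port Neutron could not bind comes back with
        # binding:vif_type == 'binding_failed'; anything else passes.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port['id'])

    # Example: the shape of a port that would trigger the error logged above.
    port = {'id': 'ef27cf19-df34-49d6-b558-2f4d11dee5a8',
            'binding:vif_type': 'binding_failed'}
    try:
        ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)

The usual root cause is that no Neutron mechanism driver or agent could bind the port on the target host, which is why the log message defers to the Neutron logs.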
{{(pid=61995) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 530.869781] env[61995]: DEBUG nova.compute.manager [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] Start building block device mappings for instance. {{(pid=61995) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 530.869781] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Expecting reply to msg 3af5e7feffb24ff28de8c65d1c2fae22 in queue reply_757213bc08bb49dab178826d88b76f40 [ 530.913002] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3af5e7feffb24ff28de8c65d1c2fae22 [ 530.982832] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97be5d7e-28ee-4388-9cc3-57bbcdc3620f {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.990157] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f37ea29b-0a81-4fcb-810b-a9f52ce5c1d0 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.027444] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d965aa5-f3d3-4474-a619-6291d02d77f0 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.034940] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e527c95-ad87-4a5a-8d03-b1538d9fd23d {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.048916] env[61995]: DEBUG nova.compute.provider_tree [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Inventory has not changed in ProviderTree for provider: c68e615f-c0c6-4278-bb7b-a579e68878a7 {{(pid=61995) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 531.049592] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Expecting reply to msg 57e496ed69c2473d91b8ef0700d59b10 in queue reply_757213bc08bb49dab178826d88b76f40 [ 531.058310] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 57e496ed69c2473d91b8ef0700d59b10 [ 531.111809] env[61995]: DEBUG oslo_vmware.api [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': task-378075, 'name': Rename_Task, 'duration_secs': 0.13038} completed successfully. 
{{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 531.112852] env[61995]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Powering on the VM {{(pid=61995) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 531.112852] env[61995]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-609d8307-e166-44dd-8f68-8b1c4714fded {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.128364] env[61995]: DEBUG oslo_vmware.api [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Waiting for the task: (returnval){ [ 531.128364] env[61995]: value = "task-378077" [ 531.128364] env[61995]: _type = "Task" [ 531.128364] env[61995]: } to complete. {{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 531.144655] env[61995]: DEBUG oslo_vmware.api [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': task-378077, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 531.158157] env[61995]: DEBUG oslo_vmware.api [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Task: {'id': task-378076, 'name': ReconfigVM_Task, 'duration_secs': 0.27109} completed successfully. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 531.158436] env[61995]: DEBUG nova.virt.vmwareapi.volumeops [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] [instance: 5527576a-d56d-42c3-a7f7-02c66c0d1b3d] Reconfigured VM instance instance-00000002 to attach disk [datastore1] 5527576a-d56d-42c3-a7f7-02c66c0d1b3d/5527576a-d56d-42c3-a7f7-02c66c0d1b3d.vmdk or device None with type sparse {{(pid=61995) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 531.159029] env[61995]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c764a54e-f3a2-47e3-9421-4785c9ecfcd4 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.167252] env[61995]: DEBUG oslo_vmware.api [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Waiting for the task: (returnval){ [ 531.167252] env[61995]: value = "task-378078" [ 531.167252] env[61995]: _type = "Task" [ 531.167252] env[61995]: } to complete. {{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 531.177670] env[61995]: DEBUG oslo_vmware.api [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Task: {'id': task-378078, 'name': Rename_Task} progress is 0%. 
{{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 531.258428] env[61995]: DEBUG nova.network.neutron [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] Updating instance_info_cache with network_info: [] {{(pid=61995) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 531.259066] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Expecting reply to msg 6ca19d618d19436d9b7c68ffb1efa1a9 in queue reply_757213bc08bb49dab178826d88b76f40 [ 531.269311] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6ca19d618d19436d9b7c68ffb1efa1a9 [ 531.374142] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Expecting reply to msg 8eb5e9960f3a4bd99209857b30bafb80 in queue reply_757213bc08bb49dab178826d88b76f40 [ 531.421264] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8eb5e9960f3a4bd99209857b30bafb80 [ 531.552764] env[61995]: DEBUG nova.scheduler.client.report [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Inventory has not changed for provider c68e615f-c0c6-4278-bb7b-a579e68878a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61995) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 531.555148] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Expecting reply to msg 8629316f01434ffebfc91860517c1a35 in queue reply_757213bc08bb49dab178826d88b76f40 [ 531.571511] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8629316f01434ffebfc91860517c1a35 [ 531.638531] env[61995]: DEBUG oslo_vmware.api [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': task-378077, 'name': PowerOnVM_Task, 'duration_secs': 0.44471} completed successfully. 
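The "Inventory has not changed" report above lists the provider inventory for c68e615f-c0c6-4278-bb7b-a579e68878a7 (VCPU total=48 at allocation_ratio 4.0, MEMORY_MB total=196590 with 512 reserved, DISK_GB total=400). As a hedged aside, the usable capacity that Placement-style accounting derives from such a record is conventionally (total - reserved) * allocation_ratio; a quick check of those numbers:

    # Worked example for the inventory logged above; the formula is the usual
    # Placement-style usable-capacity bound, not a quote of the scheduler code.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    def usable(inv):
        return (inv['total'] - inv['reserved']) * inv['allocation_ratio']

    for rc, inv in inventory.items():
        print(rc, usable(inv))
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0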
{{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 531.638797] env[61995]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Powered on the VM {{(pid=61995) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 531.639217] env[61995]: INFO nova.compute.manager [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Took 9.44 seconds to spawn the instance on the hypervisor. [ 531.639491] env[61995]: DEBUG nova.compute.manager [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Checking state {{(pid=61995) _get_power_state /opt/stack/nova/nova/compute/manager.py:1800}} [ 531.640329] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6d3ddd6-52b4-4e12-ab52-88c2102cb6c9 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.652903] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Expecting reply to msg f3a562fc3f6c46c59c6855d21f19e560 in queue reply_757213bc08bb49dab178826d88b76f40 [ 531.679524] env[61995]: DEBUG oslo_vmware.api [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Task: {'id': task-378078, 'name': Rename_Task, 'duration_secs': 0.15718} completed successfully. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 531.679524] env[61995]: DEBUG nova.virt.vmwareapi.vm_util [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] [instance: 5527576a-d56d-42c3-a7f7-02c66c0d1b3d] Powering on the VM {{(pid=61995) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 531.679524] env[61995]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d1d5c482-1fb6-4744-a7d5-d3aacebcfd12 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.686953] env[61995]: DEBUG oslo_vmware.api [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Waiting for the task: (returnval){ [ 531.686953] env[61995]: value = "task-378079" [ 531.686953] env[61995]: _type = "Task" [ 531.686953] env[61995]: } to complete. {{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 531.696399] env[61995]: DEBUG oslo_vmware.api [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Task: {'id': task-378079, 'name': PowerOnVM_Task} progress is 0%. 
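The wait_for_task/_poll_task entries around task-378073 through task-378079 (ReconfigVM_Task, ExtendVirtualDisk_Task, Rename_Task, PowerOnVM_Task) all follow the same pattern: submit a vCenter task, then poll its state until it reports success or error. A minimal illustrative poll loop under an assumed task-info shape (a sketch, not the oslo.vmware API):

    import time

    def wait_for_task(poll, interval=0.5, timeout=60.0):
        # `poll` is any callable returning a dict such as
        # {'state': 'running', 'progress': 33}, {'state': 'success'} or
        # {'state': 'error', 'error': '...'} -- an assumed shape for illustration.
        deadline = time.monotonic() + timeout
        while True:
            info = poll()
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            if time.monotonic() > deadline:
                raise TimeoutError('task did not complete in time')
            time.sleep(interval)

    # Simulated task that completes on the third poll, mirroring the
    # "progress is 33%" -> "completed successfully" sequence in the log.
    states = iter([{'state': 'running', 'progress': 33},
                   {'state': 'running', 'progress': 64},
                   {'state': 'success'}])
    print(wait_for_task(lambda: next(states), interval=0.0))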
{{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 531.761501] env[61995]: DEBUG oslo_concurrency.lockutils [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Releasing lock "refresh_cache-cd3d1cbb-be77-4fd0-9666-0d544a19a16d" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 531.761739] env[61995]: DEBUG nova.compute.manager [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] Start destroying the instance on the hypervisor. {{(pid=61995) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 531.761956] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] Destroying instance {{(pid=61995) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 531.762280] env[61995]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f252c7bf-d611-409b-add1-97459dd2c422 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.771140] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb059264-083d-4cd8-8488-510eb3a4d52f {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.791256] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f3a562fc3f6c46c59c6855d21f19e560 [ 531.799426] env[61995]: WARNING nova.virt.vmwareapi.vmops [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance cd3d1cbb-be77-4fd0-9666-0d544a19a16d could not be found. [ 531.799426] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] Instance destroyed {{(pid=61995) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 531.799426] env[61995]: INFO nova.compute.manager [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] Took 0.04 seconds to destroy the instance on the hypervisor. [ 531.799426] env[61995]: DEBUG oslo.service.loopingcall [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61995) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 531.800710] env[61995]: DEBUG nova.compute.manager [-] [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] Deallocating network for instance {{(pid=61995) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2298}} [ 531.801054] env[61995]: DEBUG nova.network.neutron [-] [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] deallocate_for_instance() {{(pid=61995) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 531.843248] env[61995]: DEBUG nova.network.neutron [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] Successfully created port: a37484e9-c5c2-4756-a547-b348cb68b24b {{(pid=61995) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 531.852146] env[61995]: DEBUG nova.network.neutron [-] [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] Instance cache missing network info. {{(pid=61995) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 531.852887] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 6ec495227cdd4123a757a76d283c7803 in queue reply_757213bc08bb49dab178826d88b76f40 [ 531.861451] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6ec495227cdd4123a757a76d283c7803 [ 531.879404] env[61995]: DEBUG nova.compute.manager [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] Start spawning the instance on the hypervisor. {{(pid=61995) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 531.909532] env[61995]: DEBUG nova.virt.hardware [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-06T07:57:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-06T07:57:11Z,direct_url=,disk_format='vmdk',id=947125f0-9664-40eb-953e-b1373b076c9f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3dc1e6434ef44fd0be01fd4ebf2b62ef',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-06T07:57:12Z,virtual_size=,visibility=), allow threads: False {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 531.909820] env[61995]: DEBUG nova.virt.hardware [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Flavor limits 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 531.910044] env[61995]: DEBUG nova.virt.hardware [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Image limits 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 531.910276] env[61995]: DEBUG 
nova.virt.hardware [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Flavor pref 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 531.910482] env[61995]: DEBUG nova.virt.hardware [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Image pref 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 531.910629] env[61995]: DEBUG nova.virt.hardware [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 531.910851] env[61995]: DEBUG nova.virt.hardware [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 531.911020] env[61995]: DEBUG nova.virt.hardware [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61995) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 531.911179] env[61995]: DEBUG nova.virt.hardware [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Got 1 possible topologies {{(pid=61995) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 531.911329] env[61995]: DEBUG nova.virt.hardware [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 531.911491] env[61995]: DEBUG nova.virt.hardware [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 531.912390] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a9ab4c3-fd72-4966-a137-9349009cffe6 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.920540] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f030c8bf-2dca-4476-9a29-8af674da13da {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.064396] env[61995]: DEBUG oslo_concurrency.lockutils [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.217s {{(pid=61995) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 532.064905] env[61995]: DEBUG nova.compute.manager [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] Start building networks asynchronously for instance. {{(pid=61995) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 532.066664] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Expecting reply to msg 1c7eae1884004d61b3c1e5d7e70403e5 in queue reply_757213bc08bb49dab178826d88b76f40 [ 532.070197] env[61995]: DEBUG oslo_concurrency.lockutils [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.911s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 532.070197] env[61995]: INFO nova.compute.claims [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 532.071939] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Expecting reply to msg 90b6ea269a82471bb25872c698ef9490 in queue reply_757213bc08bb49dab178826d88b76f40 [ 532.098818] env[61995]: DEBUG nova.compute.manager [req-0f5e573e-658d-40a9-ba11-fbf994eee975 req-2e8506df-94e0-4934-9138-dc2dc37fa61e service nova] [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] Received event network-changed-ef27cf19-df34-49d6-b558-2f4d11dee5a8 {{(pid=61995) external_instance_event /opt/stack/nova/nova/compute/manager.py:11144}} [ 532.098980] env[61995]: DEBUG nova.compute.manager [req-0f5e573e-658d-40a9-ba11-fbf994eee975 req-2e8506df-94e0-4934-9138-dc2dc37fa61e service nova] [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] Refreshing instance network info cache due to event network-changed-ef27cf19-df34-49d6-b558-2f4d11dee5a8. 
{{(pid=61995) external_instance_event /opt/stack/nova/nova/compute/manager.py:11149}} [ 532.099186] env[61995]: DEBUG oslo_concurrency.lockutils [req-0f5e573e-658d-40a9-ba11-fbf994eee975 req-2e8506df-94e0-4934-9138-dc2dc37fa61e service nova] Acquiring lock "refresh_cache-cd3d1cbb-be77-4fd0-9666-0d544a19a16d" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 532.099319] env[61995]: DEBUG oslo_concurrency.lockutils [req-0f5e573e-658d-40a9-ba11-fbf994eee975 req-2e8506df-94e0-4934-9138-dc2dc37fa61e service nova] Acquired lock "refresh_cache-cd3d1cbb-be77-4fd0-9666-0d544a19a16d" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 532.099470] env[61995]: DEBUG nova.network.neutron [req-0f5e573e-658d-40a9-ba11-fbf994eee975 req-2e8506df-94e0-4934-9138-dc2dc37fa61e service nova] [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] Refreshing network info cache for port ef27cf19-df34-49d6-b558-2f4d11dee5a8 {{(pid=61995) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 532.099906] env[61995]: INFO oslo_messaging._drivers.amqpdriver [req-0f5e573e-658d-40a9-ba11-fbf994eee975 req-2e8506df-94e0-4934-9138-dc2dc37fa61e service nova] Expecting reply to msg 31695a850a81440194e890f283c25a71 in queue reply_757213bc08bb49dab178826d88b76f40 [ 532.114263] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 31695a850a81440194e890f283c25a71 [ 532.125808] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 90b6ea269a82471bb25872c698ef9490 [ 532.127789] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1c7eae1884004d61b3c1e5d7e70403e5 [ 532.160509] env[61995]: INFO nova.compute.manager [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Took 14.17 seconds to build instance. [ 532.160841] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Expecting reply to msg c53c26959dbe404e8867a62cba57a7e8 in queue reply_757213bc08bb49dab178826d88b76f40 [ 532.176760] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c53c26959dbe404e8867a62cba57a7e8 [ 532.198783] env[61995]: DEBUG oslo_vmware.api [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Task: {'id': task-378079, 'name': PowerOnVM_Task} progress is 64%. 
{{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 532.355427] env[61995]: DEBUG nova.network.neutron [-] [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] Updating instance_info_cache with network_info: [] {{(pid=61995) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 532.355682] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 31c28b8e8de54cadb56f2e58e61153ac in queue reply_757213bc08bb49dab178826d88b76f40 [ 532.370618] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 31c28b8e8de54cadb56f2e58e61153ac [ 532.459653] env[61995]: ERROR nova.compute.manager [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b996cfab-151e-438f-929d-3393ad4e64f7, please check neutron logs for more information. [ 532.459653] env[61995]: ERROR nova.compute.manager Traceback (most recent call last): [ 532.459653] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1998, in _allocate_network_async [ 532.459653] env[61995]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 532.459653] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 532.459653] env[61995]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 532.459653] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 532.459653] env[61995]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 532.459653] env[61995]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 532.459653] env[61995]: ERROR nova.compute.manager self.force_reraise() [ 532.459653] env[61995]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 532.459653] env[61995]: ERROR nova.compute.manager raise self.value [ 532.459653] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 532.459653] env[61995]: ERROR nova.compute.manager updated_port = self._update_port( [ 532.459653] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 532.459653] env[61995]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 532.460253] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 532.460253] env[61995]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 532.460253] env[61995]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b996cfab-151e-438f-929d-3393ad4e64f7, please check neutron logs for more information. 
[ 532.460253] env[61995]: ERROR nova.compute.manager [ 532.460253] env[61995]: Traceback (most recent call last): [ 532.460253] env[61995]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 532.460253] env[61995]: listener.cb(fileno) [ 532.460253] env[61995]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 532.460253] env[61995]: result = function(*args, **kwargs) [ 532.460253] env[61995]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 532.460253] env[61995]: return func(*args, **kwargs) [ 532.460253] env[61995]: File "/opt/stack/nova/nova/compute/manager.py", line 2020, in _allocate_network_async [ 532.460253] env[61995]: raise e [ 532.460253] env[61995]: File "/opt/stack/nova/nova/compute/manager.py", line 1998, in _allocate_network_async [ 532.460253] env[61995]: nwinfo = self.network_api.allocate_for_instance( [ 532.460253] env[61995]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 532.460253] env[61995]: created_port_ids = self._update_ports_for_instance( [ 532.460253] env[61995]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 532.460253] env[61995]: with excutils.save_and_reraise_exception(): [ 532.460253] env[61995]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 532.460253] env[61995]: self.force_reraise() [ 532.460253] env[61995]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 532.460253] env[61995]: raise self.value [ 532.460253] env[61995]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 532.460253] env[61995]: updated_port = self._update_port( [ 532.460253] env[61995]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 532.460253] env[61995]: _ensure_no_port_binding_failure(port) [ 532.460253] env[61995]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 532.460253] env[61995]: raise exception.PortBindingFailed(port_id=port['id']) [ 532.460966] env[61995]: nova.exception.PortBindingFailed: Binding failed for port b996cfab-151e-438f-929d-3393ad4e64f7, please check neutron logs for more information. [ 532.460966] env[61995]: Removing descriptor: 17 [ 532.460966] env[61995]: ERROR nova.compute.manager [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b996cfab-151e-438f-929d-3393ad4e64f7, please check neutron logs for more information. 
[ 532.460966] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] Traceback (most recent call last): [ 532.460966] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] File "/opt/stack/nova/nova/compute/manager.py", line 2903, in _build_resources [ 532.460966] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] yield resources [ 532.460966] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] File "/opt/stack/nova/nova/compute/manager.py", line 2650, in _build_and_run_instance [ 532.460966] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] self.driver.spawn(context, instance, image_meta, [ 532.460966] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 532.460966] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 532.460966] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 532.460966] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] vm_ref = self.build_virtual_machine(instance, [ 532.461273] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 532.461273] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] vif_infos = vmwarevif.get_vif_info(self._session, [ 532.461273] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 532.461273] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] for vif in network_info: [ 532.461273] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 532.461273] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] return self._sync_wrapper(fn, *args, **kwargs) [ 532.461273] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 532.461273] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] self.wait() [ 532.461273] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 532.461273] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] self[:] = self._gt.wait() [ 532.461273] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 532.461273] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] return self._exit_event.wait() [ 532.461273] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 532.461611] env[61995]: ERROR 
nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] result = hub.switch() [ 532.461611] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 532.461611] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] return self.greenlet.switch() [ 532.461611] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 532.461611] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] result = function(*args, **kwargs) [ 532.461611] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 532.461611] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] return func(*args, **kwargs) [ 532.461611] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] File "/opt/stack/nova/nova/compute/manager.py", line 2020, in _allocate_network_async [ 532.461611] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] raise e [ 532.461611] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] File "/opt/stack/nova/nova/compute/manager.py", line 1998, in _allocate_network_async [ 532.461611] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] nwinfo = self.network_api.allocate_for_instance( [ 532.461611] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 532.461611] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] created_port_ids = self._update_ports_for_instance( [ 532.461952] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 532.461952] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] with excutils.save_and_reraise_exception(): [ 532.461952] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 532.461952] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] self.force_reraise() [ 532.461952] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 532.461952] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] raise self.value [ 532.461952] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 532.461952] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] updated_port = self._update_port( [ 532.461952] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 532.461952] 
env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] _ensure_no_port_binding_failure(port) [ 532.461952] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 532.461952] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] raise exception.PortBindingFailed(port_id=port['id']) [ 532.462265] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] nova.exception.PortBindingFailed: Binding failed for port b996cfab-151e-438f-929d-3393ad4e64f7, please check neutron logs for more information. [ 532.462265] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] [ 532.462265] env[61995]: INFO nova.compute.manager [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] Terminating instance [ 532.463377] env[61995]: DEBUG oslo_concurrency.lockutils [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Acquiring lock "refresh_cache-4c0dd905-f751-4cb4-9be1-ef06518990f8" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 532.463540] env[61995]: DEBUG oslo_concurrency.lockutils [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Acquired lock "refresh_cache-4c0dd905-f751-4cb4-9be1-ef06518990f8" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 532.463698] env[61995]: DEBUG nova.network.neutron [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] Building network info cache for instance {{(pid=61995) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 532.464149] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Expecting reply to msg 7f14b503b58c46ed9e47bffab415e92f in queue reply_757213bc08bb49dab178826d88b76f40 [ 532.471720] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7f14b503b58c46ed9e47bffab415e92f [ 532.578908] env[61995]: DEBUG nova.compute.utils [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Using /dev/sd instead of None {{(pid=61995) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 532.578908] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Expecting reply to msg e9fdd57a0a5d4db3b9f0893802589c69 in queue reply_757213bc08bb49dab178826d88b76f40 [ 532.578908] env[61995]: DEBUG nova.compute.manager [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] 
Allocating IP information in the background. {{(pid=61995) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1990}} [ 532.578908] env[61995]: DEBUG nova.network.neutron [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] allocate_for_instance() {{(pid=61995) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 532.582127] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Expecting reply to msg 86c25c65a2c0460c82ac923ab38037b5 in queue reply_757213bc08bb49dab178826d88b76f40 [ 532.592035] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 86c25c65a2c0460c82ac923ab38037b5 [ 532.592309] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e9fdd57a0a5d4db3b9f0893802589c69 [ 532.649170] env[61995]: DEBUG nova.network.neutron [req-0f5e573e-658d-40a9-ba11-fbf994eee975 req-2e8506df-94e0-4934-9138-dc2dc37fa61e service nova] [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] Instance cache missing network info. {{(pid=61995) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 532.662803] env[61995]: DEBUG oslo_concurrency.lockutils [None req-b3c32f4f-f611-4449-84d9-2cbbe563e167 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Lock "1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.676s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 532.696033] env[61995]: DEBUG oslo_vmware.api [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Task: {'id': task-378079, 'name': PowerOnVM_Task, 'duration_secs': 0.781545} completed successfully. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 532.696494] env[61995]: DEBUG nova.virt.vmwareapi.vm_util [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] [instance: 5527576a-d56d-42c3-a7f7-02c66c0d1b3d] Powered on the VM {{(pid=61995) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 532.696801] env[61995]: INFO nova.compute.manager [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] [instance: 5527576a-d56d-42c3-a7f7-02c66c0d1b3d] Took 7.99 seconds to spawn the instance on the hypervisor. 
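The PortBindingFailed traceback above follows a recurring Nova pattern: allocate_for_instance asks Neutron to bind the port, the returned port reports a failed binding, and _ensure_no_port_binding_failure converts that into PortBindingFailed while save_and_reraise_exception preserves the original error for the caller. A minimal, self-contained sketch of that pattern follows; the PortBindingFailed class, the port dict, and the 'binding:vif_type' check are simplified stand-ins for illustration, not the actual nova/network/neutron.py code.

    # Hedged sketch of the port-binding failure check seen in the traceback.
    # All names here are illustrative; only the overall flow matches the log.
    import contextlib


    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs for "
                "more information." % port_id)


    @contextlib.contextmanager
    def save_and_reraise_exception():
        # Simplified imitation of oslo_utils.excutils.save_and_reraise_exception:
        # let the original exception propagate once the with-block unwinds.
        try:
            yield
        except Exception:
            raise


    def _ensure_no_port_binding_failure(port):
        # Neutron marks an unbindable port with a failed VIF type; the check
        # turns that into PortBindingFailed so the build is aborted and cleaned up.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])


    def update_port(port):
        with save_and_reraise_exception():
            _ensure_no_port_binding_failure(port)
        return port


    if __name__ == '__main__':
        try:
            update_port({'id': 'b996cfab-151e-438f-929d-3393ad4e64f7',
                         'binding:vif_type': 'binding_failed'})
        except PortBindingFailed as exc:
            print(exc)   # mirrors the ERROR message repeated in the log above

Running the sketch prints the same "Binding failed for port ..." message that the compute manager logs before terminating the instance.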
[ 532.697402] env[61995]: DEBUG nova.compute.manager [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] [instance: 5527576a-d56d-42c3-a7f7-02c66c0d1b3d] Checking state {{(pid=61995) _get_power_state /opt/stack/nova/nova/compute/manager.py:1800}} [ 532.698625] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f051c703-a90b-47b5-a360-630f791736f9 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.709929] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Expecting reply to msg 5e99275e63bd40b5adc9d8877d36e19f in queue reply_757213bc08bb49dab178826d88b76f40 [ 532.767045] env[61995]: DEBUG nova.network.neutron [req-0f5e573e-658d-40a9-ba11-fbf994eee975 req-2e8506df-94e0-4934-9138-dc2dc37fa61e service nova] [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] Updating instance_info_cache with network_info: [] {{(pid=61995) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 532.767576] env[61995]: INFO oslo_messaging._drivers.amqpdriver [req-0f5e573e-658d-40a9-ba11-fbf994eee975 req-2e8506df-94e0-4934-9138-dc2dc37fa61e service nova] Expecting reply to msg cb0efd24a3be432687f555e106dd941b in queue reply_757213bc08bb49dab178826d88b76f40 [ 532.769375] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5e99275e63bd40b5adc9d8877d36e19f [ 532.777528] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cb0efd24a3be432687f555e106dd941b [ 532.801765] env[61995]: DEBUG nova.policy [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e0c6070801d04486b651521936ffa6af', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '993c29fb8e0040548a2d8e69320e9727', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61995) authorize /opt/stack/nova/nova/policy.py:203}} [ 532.858332] env[61995]: INFO nova.compute.manager [-] [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] Took 1.06 seconds to deallocate network for instance. 
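The PowerOnVM_Task record above is emitted by a task poller that keeps retrieving the vCenter task state until it reports success or error. A generic polling loop in the same spirit is sketched below; poll_task, TaskFailed, and fetch_state are illustrative assumptions, not the oslo.vmware API.

    # Hedged sketch: a generic "poll until done" loop similar in spirit to the
    # _poll_task behaviour logged above.
    import time


    class TaskFailed(Exception):
        pass


    def poll_task(fetch_state, interval=0.5, timeout=60.0):
        """Call fetch_state() until it reports success, error, or timeout."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, result = fetch_state()
            if state == 'success':
                return result
            if state == 'error':
                raise TaskFailed(result)
            time.sleep(interval)          # task still queued or running
        raise TaskFailed('timed out waiting for task')


    if __name__ == '__main__':
        states = iter([('running', None), ('running', None),
                       ('success', {'name': 'PowerOnVM_Task',
                                    'duration_secs': 0.78})])
        print(poll_task(lambda: next(states), interval=0.01))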
[ 532.861169] env[61995]: DEBUG nova.compute.claims [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] Aborting claim: {{(pid=61995) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 532.861319] env[61995]: DEBUG oslo_concurrency.lockutils [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 533.087661] env[61995]: DEBUG nova.compute.manager [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] Start building block device mappings for instance. {{(pid=61995) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 533.089488] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Expecting reply to msg a4e02456353f4be98134bd1f4c574e1b in queue reply_757213bc08bb49dab178826d88b76f40 [ 533.137663] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a4e02456353f4be98134bd1f4c574e1b [ 533.209231] env[61995]: DEBUG nova.network.neutron [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] Instance cache missing network info. {{(pid=61995) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 533.220895] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60bc9ade-0bfa-4ae4-846c-d2acd1f10f44 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.227934] env[61995]: INFO nova.compute.manager [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] [instance: 5527576a-d56d-42c3-a7f7-02c66c0d1b3d] Took 12.72 seconds to build instance. 
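The lockutils records in this stretch ("Acquiring lock ... by ...", "acquired ... waited Ns", "released ... held Ns") all come from named locks that serialize work on shared state such as compute_resources. The toy helper below reproduces that waited/held reporting; it is an illustration of the pattern only, not oslo.concurrency itself.

    # Hedged sketch: a tiny named-lock helper that reports wait and hold times,
    # mimicking the lockutils messages in the log.
    import threading
    import time
    from contextlib import contextmanager

    _locks = {}
    _registry_guard = threading.Lock()


    def _get_lock(name):
        with _registry_guard:
            return _locks.setdefault(name, threading.Lock())


    @contextmanager
    def named_lock(name, owner):
        lock = _get_lock(name)
        start = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - start
        print('Lock "%s" acquired by "%s" :: waited %.3fs' % (name, owner, waited))
        try:
            yield
        finally:
            held = time.monotonic() - start - waited
            lock.release()
            print('Lock "%s" "released" by "%s" :: held %.3fs' % (name, owner, held))


    if __name__ == '__main__':
        with named_lock('compute_resources', 'abort_instance_claim'):
            time.sleep(0.05)   # pretend to update the resource tracker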
[ 533.228330] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Expecting reply to msg c453d887bca54f0fae29c53434788c22 in queue reply_757213bc08bb49dab178826d88b76f40 [ 533.234744] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68272f39-f5f8-4f93-bd68-239df769564e {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.268211] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c453d887bca54f0fae29c53434788c22 [ 533.271330] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-682bb8c5-d9b6-4980-af49-98a0bf0ad78f {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.274015] env[61995]: DEBUG oslo_concurrency.lockutils [req-0f5e573e-658d-40a9-ba11-fbf994eee975 req-2e8506df-94e0-4934-9138-dc2dc37fa61e service nova] Releasing lock "refresh_cache-cd3d1cbb-be77-4fd0-9666-0d544a19a16d" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 533.279826] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3259d5a9-e830-41f7-b68c-bcbbdf7881f1 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.304541] env[61995]: DEBUG nova.compute.provider_tree [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Inventory has not changed in ProviderTree for provider: c68e615f-c0c6-4278-bb7b-a579e68878a7 {{(pid=61995) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 533.305031] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Expecting reply to msg d4a8c89fb7b3444eb8e9520d886a9c06 in queue reply_757213bc08bb49dab178826d88b76f40 [ 533.314514] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d4a8c89fb7b3444eb8e9520d886a9c06 [ 533.399299] env[61995]: DEBUG nova.network.neutron [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] Updating instance_info_cache with network_info: [] {{(pid=61995) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 533.399873] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Expecting reply to msg 1e856668bb68473385e88f9fd5730fb8 in queue reply_757213bc08bb49dab178826d88b76f40 [ 533.411244] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1e856668bb68473385e88f9fd5730fb8 [ 533.514425] env[61995]: DEBUG nova.network.neutron [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] Successfully created 
port: 44c93bf1-8e65-4c5c-bbf7-fa9b74625351 {{(pid=61995) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 533.601990] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Expecting reply to msg 576154d11f394a40a621019bbe4ab7b7 in queue reply_757213bc08bb49dab178826d88b76f40 [ 533.636404] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 576154d11f394a40a621019bbe4ab7b7 [ 533.730311] env[61995]: DEBUG oslo_concurrency.lockutils [None req-220a2570-a212-490a-b3c7-e114abe2c150 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Lock "5527576a-d56d-42c3-a7f7-02c66c0d1b3d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.234s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 533.807702] env[61995]: DEBUG nova.scheduler.client.report [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Inventory has not changed for provider c68e615f-c0c6-4278-bb7b-a579e68878a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61995) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 533.810587] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Expecting reply to msg bc61d2cb54f84a5ea8c92957810358b7 in queue reply_757213bc08bb49dab178826d88b76f40 [ 533.827219] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bc61d2cb54f84a5ea8c92957810358b7 [ 533.901922] env[61995]: DEBUG oslo_concurrency.lockutils [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Releasing lock "refresh_cache-4c0dd905-f751-4cb4-9be1-ef06518990f8" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 533.902342] env[61995]: DEBUG nova.compute.manager [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] Start destroying the instance on the hypervisor. 
{{(pid=61995) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 533.902537] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] Destroying instance {{(pid=61995) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 533.902842] env[61995]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a039abe1-5a4e-49d6-a604-8d35c40c6d13 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.914315] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b0f1c2a-b451-4ed8-a552-0844f36d5d66 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.941431] env[61995]: WARNING nova.virt.vmwareapi.vmops [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4c0dd905-f751-4cb4-9be1-ef06518990f8 could not be found. [ 533.941654] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] Instance destroyed {{(pid=61995) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 533.941975] env[61995]: INFO nova.compute.manager [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] Took 0.04 seconds to destroy the instance on the hypervisor. [ 533.942254] env[61995]: DEBUG oslo.service.loopingcall [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61995) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 533.942475] env[61995]: DEBUG nova.compute.manager [-] [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] Deallocating network for instance {{(pid=61995) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2298}} [ 533.942581] env[61995]: DEBUG nova.network.neutron [-] [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] deallocate_for_instance() {{(pid=61995) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 534.025310] env[61995]: DEBUG nova.network.neutron [-] [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] Instance cache missing network info. 
{{(pid=61995) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 534.025310] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 661b00ef006d44e3919ee493755b0af3 in queue reply_757213bc08bb49dab178826d88b76f40 [ 534.035123] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 661b00ef006d44e3919ee493755b0af3 [ 534.104914] env[61995]: DEBUG nova.compute.manager [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] Start spawning the instance on the hypervisor. {{(pid=61995) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 534.134519] env[61995]: DEBUG nova.virt.hardware [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-06T07:57:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-06T07:57:11Z,direct_url=,disk_format='vmdk',id=947125f0-9664-40eb-953e-b1373b076c9f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3dc1e6434ef44fd0be01fd4ebf2b62ef',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-06T07:57:12Z,virtual_size=,visibility=), allow threads: False {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 534.134833] env[61995]: DEBUG nova.virt.hardware [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Flavor limits 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 534.135019] env[61995]: DEBUG nova.virt.hardware [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Image limits 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 534.135227] env[61995]: DEBUG nova.virt.hardware [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Flavor pref 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 534.135396] env[61995]: DEBUG nova.virt.hardware [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Image pref 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 534.135542] env[61995]: DEBUG nova.virt.hardware [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 534.135799] env[61995]: DEBUG nova.virt.hardware [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 
tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 534.136046] env[61995]: DEBUG nova.virt.hardware [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61995) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 534.136204] env[61995]: DEBUG nova.virt.hardware [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Got 1 possible topologies {{(pid=61995) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 534.136412] env[61995]: DEBUG nova.virt.hardware [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 534.136612] env[61995]: DEBUG nova.virt.hardware [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 534.137504] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10d29e35-278e-43be-bfa1-b171d4f5ce41 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.149435] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d791dc4-7ddb-4479-97cd-53ed7cdf47ee {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.313807] env[61995]: DEBUG oslo_concurrency.lockutils [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.246s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 534.314315] env[61995]: DEBUG nova.compute.manager [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] Start building networks asynchronously for instance. 
{{(pid=61995) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 534.316169] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Expecting reply to msg a5498e8f1f4343c29749ccc3380a8c6a in queue reply_757213bc08bb49dab178826d88b76f40 [ 534.317153] env[61995]: DEBUG oslo_concurrency.lockutils [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 5.159s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 534.317323] env[61995]: DEBUG oslo_concurrency.lockutils [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 534.317465] env[61995]: DEBUG nova.compute.resource_tracker [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61995) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 534.317731] env[61995]: DEBUG oslo_concurrency.lockutils [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 1.457s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 534.319460] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Expecting reply to msg 791eea08cde04781adf7d96302d95ac4 in queue reply_757213bc08bb49dab178826d88b76f40 [ 534.328534] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfc8e18a-b8f3-44a2-a9e7-32567526d166 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.342159] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc8daaf7-70bb-4d9d-858e-9225a3af455c {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.359920] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e675ff1-8004-4407-b7df-375790ffbef0 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.367122] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f1794f1-21b9-4a30-9d3d-31ab00724c5b {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.402785] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a5498e8f1f4343c29749ccc3380a8c6a [ 534.403486] env[61995]: DEBUG nova.compute.resource_tracker [None 
req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181762MB free_disk=139GB free_vcpus=48 pci_devices=None {{(pid=61995) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 534.404142] env[61995]: DEBUG oslo_concurrency.lockutils [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 534.412419] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 791eea08cde04781adf7d96302d95ac4 [ 534.469575] env[61995]: DEBUG oslo_concurrency.lockutils [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] Acquiring lock "b819920a-cc74-4718-a054-e81affabcd5b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 534.469874] env[61995]: DEBUG oslo_concurrency.lockutils [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] Lock "b819920a-cc74-4718-a054-e81affabcd5b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 534.470345] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] Expecting reply to msg 885a23b7237a4fcda1e6cb7efa96b78f in queue reply_757213bc08bb49dab178826d88b76f40 [ 534.486243] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 885a23b7237a4fcda1e6cb7efa96b78f [ 534.515528] env[61995]: ERROR nova.compute.manager [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port a37484e9-c5c2-4756-a547-b348cb68b24b, please check neutron logs for more information. 
[ 534.515528] env[61995]: ERROR nova.compute.manager Traceback (most recent call last): [ 534.515528] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1998, in _allocate_network_async [ 534.515528] env[61995]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 534.515528] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 534.515528] env[61995]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 534.515528] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 534.515528] env[61995]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 534.515528] env[61995]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 534.515528] env[61995]: ERROR nova.compute.manager self.force_reraise() [ 534.515528] env[61995]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 534.515528] env[61995]: ERROR nova.compute.manager raise self.value [ 534.515528] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 534.515528] env[61995]: ERROR nova.compute.manager updated_port = self._update_port( [ 534.515528] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 534.515528] env[61995]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 534.516103] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 534.516103] env[61995]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 534.516103] env[61995]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port a37484e9-c5c2-4756-a547-b348cb68b24b, please check neutron logs for more information. 
[ 534.516103] env[61995]: ERROR nova.compute.manager [ 534.516103] env[61995]: Traceback (most recent call last): [ 534.516103] env[61995]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 534.516103] env[61995]: listener.cb(fileno) [ 534.516103] env[61995]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 534.516103] env[61995]: result = function(*args, **kwargs) [ 534.516103] env[61995]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 534.516103] env[61995]: return func(*args, **kwargs) [ 534.516103] env[61995]: File "/opt/stack/nova/nova/compute/manager.py", line 2020, in _allocate_network_async [ 534.516103] env[61995]: raise e [ 534.516103] env[61995]: File "/opt/stack/nova/nova/compute/manager.py", line 1998, in _allocate_network_async [ 534.516103] env[61995]: nwinfo = self.network_api.allocate_for_instance( [ 534.516103] env[61995]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 534.516103] env[61995]: created_port_ids = self._update_ports_for_instance( [ 534.516103] env[61995]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 534.516103] env[61995]: with excutils.save_and_reraise_exception(): [ 534.516103] env[61995]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 534.516103] env[61995]: self.force_reraise() [ 534.516103] env[61995]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 534.516103] env[61995]: raise self.value [ 534.516103] env[61995]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 534.516103] env[61995]: updated_port = self._update_port( [ 534.516103] env[61995]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 534.516103] env[61995]: _ensure_no_port_binding_failure(port) [ 534.516103] env[61995]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 534.516103] env[61995]: raise exception.PortBindingFailed(port_id=port['id']) [ 534.516866] env[61995]: nova.exception.PortBindingFailed: Binding failed for port a37484e9-c5c2-4756-a547-b348cb68b24b, please check neutron logs for more information. [ 534.516866] env[61995]: Removing descriptor: 15 [ 534.516866] env[61995]: ERROR nova.compute.manager [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port a37484e9-c5c2-4756-a547-b348cb68b24b, please check neutron logs for more information. 
[ 534.516866] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] Traceback (most recent call last): [ 534.516866] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] File "/opt/stack/nova/nova/compute/manager.py", line 2903, in _build_resources [ 534.516866] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] yield resources [ 534.516866] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] File "/opt/stack/nova/nova/compute/manager.py", line 2650, in _build_and_run_instance [ 534.516866] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] self.driver.spawn(context, instance, image_meta, [ 534.516866] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 534.516866] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] self._vmops.spawn(context, instance, image_meta, injected_files, [ 534.516866] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 534.516866] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] vm_ref = self.build_virtual_machine(instance, [ 534.517289] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 534.517289] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] vif_infos = vmwarevif.get_vif_info(self._session, [ 534.517289] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 534.517289] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] for vif in network_info: [ 534.517289] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 534.517289] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] return self._sync_wrapper(fn, *args, **kwargs) [ 534.517289] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 534.517289] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] self.wait() [ 534.517289] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 534.517289] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] self[:] = self._gt.wait() [ 534.517289] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 534.517289] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] return self._exit_event.wait() [ 534.517289] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 534.517734] env[61995]: ERROR 
nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] result = hub.switch() [ 534.517734] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 534.517734] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] return self.greenlet.switch() [ 534.517734] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 534.517734] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] result = function(*args, **kwargs) [ 534.517734] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 534.517734] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] return func(*args, **kwargs) [ 534.517734] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] File "/opt/stack/nova/nova/compute/manager.py", line 2020, in _allocate_network_async [ 534.517734] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] raise e [ 534.517734] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] File "/opt/stack/nova/nova/compute/manager.py", line 1998, in _allocate_network_async [ 534.517734] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] nwinfo = self.network_api.allocate_for_instance( [ 534.517734] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 534.517734] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] created_port_ids = self._update_ports_for_instance( [ 534.518165] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 534.518165] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] with excutils.save_and_reraise_exception(): [ 534.518165] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 534.518165] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] self.force_reraise() [ 534.518165] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 534.518165] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] raise self.value [ 534.518165] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 534.518165] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] updated_port = self._update_port( [ 534.518165] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 534.518165] 
env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] _ensure_no_port_binding_failure(port) [ 534.518165] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 534.518165] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] raise exception.PortBindingFailed(port_id=port['id']) [ 534.518461] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] nova.exception.PortBindingFailed: Binding failed for port a37484e9-c5c2-4756-a547-b348cb68b24b, please check neutron logs for more information. [ 534.518461] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] [ 534.518461] env[61995]: INFO nova.compute.manager [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] Terminating instance [ 534.522686] env[61995]: DEBUG oslo_concurrency.lockutils [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Acquiring lock "refresh_cache-b610e51c-f7a4-4e3a-85c1-28603fc82bab" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 534.522686] env[61995]: DEBUG oslo_concurrency.lockutils [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Acquired lock "refresh_cache-b610e51c-f7a4-4e3a-85c1-28603fc82bab" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 534.522686] env[61995]: DEBUG nova.network.neutron [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] Building network info cache for instance {{(pid=61995) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 534.522686] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Expecting reply to msg 2ffadc818341447dad468cf3e13cbd0e in queue reply_757213bc08bb49dab178826d88b76f40 [ 534.527301] env[61995]: DEBUG nova.network.neutron [-] [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] Updating instance_info_cache with network_info: [] {{(pid=61995) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 534.528032] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 1cb30e3984154d9bb01134e294abf8dc in queue reply_757213bc08bb49dab178826d88b76f40 [ 534.528884] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2ffadc818341447dad468cf3e13cbd0e [ 534.537916] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1cb30e3984154d9bb01134e294abf8dc [ 534.842585] env[61995]: DEBUG nova.compute.utils [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Using /dev/sd instead of None {{(pid=61995) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 534.842585] env[61995]: INFO 
oslo_messaging._drivers.amqpdriver [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Expecting reply to msg 4955bb6912ef4e0692fb3bc19fd1fd3a in queue reply_757213bc08bb49dab178826d88b76f40 [ 534.842585] env[61995]: DEBUG nova.compute.manager [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] Allocating IP information in the background. {{(pid=61995) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1990}} [ 534.842585] env[61995]: DEBUG nova.network.neutron [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] allocate_for_instance() {{(pid=61995) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 534.849586] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4955bb6912ef4e0692fb3bc19fd1fd3a [ 534.851212] env[61995]: DEBUG nova.compute.manager [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] Start building block device mappings for instance. {{(pid=61995) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 534.853192] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Expecting reply to msg eb97b396f53e434892f593b629213e7b in queue reply_757213bc08bb49dab178826d88b76f40 [ 534.903768] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eb97b396f53e434892f593b629213e7b [ 534.973977] env[61995]: DEBUG nova.compute.manager [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] [instance: b819920a-cc74-4718-a054-e81affabcd5b] Starting instance... 
{{(pid=61995) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 534.976330] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] Expecting reply to msg b6aa5877b04f45e28aa58e358f38bb29 in queue reply_757213bc08bb49dab178826d88b76f40 [ 534.979521] env[61995]: DEBUG nova.policy [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f9a7b8595e004e73819bb4e706ef3ba2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1cc73a653733481992cb3b728811779d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61995) authorize /opt/stack/nova/nova/policy.py:203}} [ 535.012451] env[61995]: DEBUG nova.compute.manager [req-353c7d14-6fcc-435c-87c4-66ae72685786 req-b23a9403-4fec-4e18-a08b-6f402caa00da service nova] [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] Received event network-changed-b996cfab-151e-438f-929d-3393ad4e64f7 {{(pid=61995) external_instance_event /opt/stack/nova/nova/compute/manager.py:11144}} [ 535.012909] env[61995]: DEBUG nova.compute.manager [req-353c7d14-6fcc-435c-87c4-66ae72685786 req-b23a9403-4fec-4e18-a08b-6f402caa00da service nova] [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] Refreshing instance network info cache due to event network-changed-b996cfab-151e-438f-929d-3393ad4e64f7. 
{{(pid=61995) external_instance_event /opt/stack/nova/nova/compute/manager.py:11149}} [ 535.013211] env[61995]: DEBUG oslo_concurrency.lockutils [req-353c7d14-6fcc-435c-87c4-66ae72685786 req-b23a9403-4fec-4e18-a08b-6f402caa00da service nova] Acquiring lock "refresh_cache-4c0dd905-f751-4cb4-9be1-ef06518990f8" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 535.013503] env[61995]: DEBUG oslo_concurrency.lockutils [req-353c7d14-6fcc-435c-87c4-66ae72685786 req-b23a9403-4fec-4e18-a08b-6f402caa00da service nova] Acquired lock "refresh_cache-4c0dd905-f751-4cb4-9be1-ef06518990f8" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 535.014112] env[61995]: DEBUG nova.network.neutron [req-353c7d14-6fcc-435c-87c4-66ae72685786 req-b23a9403-4fec-4e18-a08b-6f402caa00da service nova] [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] Refreshing network info cache for port b996cfab-151e-438f-929d-3393ad4e64f7 {{(pid=61995) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 535.014629] env[61995]: INFO oslo_messaging._drivers.amqpdriver [req-353c7d14-6fcc-435c-87c4-66ae72685786 req-b23a9403-4fec-4e18-a08b-6f402caa00da service nova] Expecting reply to msg d71c1217b79944688cb617ff90379096 in queue reply_757213bc08bb49dab178826d88b76f40 [ 535.032131] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e6b977c-1e46-4eff-9826-453ec61fc204 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.037172] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b6aa5877b04f45e28aa58e358f38bb29 [ 535.037787] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d71c1217b79944688cb617ff90379096 [ 535.041832] env[61995]: INFO nova.compute.manager [-] [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] Took 1.10 seconds to deallocate network for instance. 
[ 535.044719] env[61995]: DEBUG nova.compute.claims [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] Aborting claim: {{(pid=61995) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 535.044983] env[61995]: DEBUG oslo_concurrency.lockutils [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 535.049886] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd2a0bed-02b9-4ca6-a4bd-1bb5a78b706b {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.083437] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-057f59d2-9d3c-456a-ab25-583667db8831 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.091515] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41b8ded9-b53f-48fa-be64-d25566bf5751 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.108294] env[61995]: DEBUG nova.compute.provider_tree [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Inventory has not changed in ProviderTree for provider: c68e615f-c0c6-4278-bb7b-a579e68878a7 {{(pid=61995) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 535.108931] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Expecting reply to msg 206c2a44bf7c4481822a3c43c5acab32 in queue reply_757213bc08bb49dab178826d88b76f40 [ 535.119072] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 206c2a44bf7c4481822a3c43c5acab32 [ 535.124442] env[61995]: DEBUG nova.network.neutron [req-353c7d14-6fcc-435c-87c4-66ae72685786 req-b23a9403-4fec-4e18-a08b-6f402caa00da service nova] [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] Instance cache missing network info. {{(pid=61995) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 535.139804] env[61995]: DEBUG nova.network.neutron [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] Instance cache missing network info. 
{{(pid=61995) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 535.292459] env[61995]: DEBUG nova.network.neutron [req-353c7d14-6fcc-435c-87c4-66ae72685786 req-b23a9403-4fec-4e18-a08b-6f402caa00da service nova] [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] Updating instance_info_cache with network_info: [] {{(pid=61995) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 535.292993] env[61995]: INFO oslo_messaging._drivers.amqpdriver [req-353c7d14-6fcc-435c-87c4-66ae72685786 req-b23a9403-4fec-4e18-a08b-6f402caa00da service nova] Expecting reply to msg 129df04a25f34411a3f4c06ef13e62ec in queue reply_757213bc08bb49dab178826d88b76f40 [ 535.301986] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 129df04a25f34411a3f4c06ef13e62ec [ 535.361275] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Expecting reply to msg 5ddb924a7a4c421cbca426d0b8bccf12 in queue reply_757213bc08bb49dab178826d88b76f40 [ 535.401917] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ddb924a7a4c421cbca426d0b8bccf12 [ 535.422629] env[61995]: DEBUG nova.network.neutron [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] Updating instance_info_cache with network_info: [] {{(pid=61995) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 535.423165] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Expecting reply to msg 2bc1b90a95b045f6b37c3d1cfe427eba in queue reply_757213bc08bb49dab178826d88b76f40 [ 535.430997] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2bc1b90a95b045f6b37c3d1cfe427eba [ 535.498253] env[61995]: DEBUG oslo_concurrency.lockutils [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 535.615601] env[61995]: DEBUG nova.scheduler.client.report [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Inventory has not changed for provider c68e615f-c0c6-4278-bb7b-a579e68878a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61995) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 535.615601] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 
tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Expecting reply to msg 1e408dac7ee14a46a280ed864913fd9b in queue reply_757213bc08bb49dab178826d88b76f40 [ 535.627342] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1e408dac7ee14a46a280ed864913fd9b [ 535.795755] env[61995]: DEBUG oslo_concurrency.lockutils [req-353c7d14-6fcc-435c-87c4-66ae72685786 req-b23a9403-4fec-4e18-a08b-6f402caa00da service nova] Releasing lock "refresh_cache-4c0dd905-f751-4cb4-9be1-ef06518990f8" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 535.864587] env[61995]: DEBUG nova.compute.manager [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] Start spawning the instance on the hypervisor. {{(pid=61995) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 535.893824] env[61995]: DEBUG nova.virt.hardware [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-06T07:57:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-06T07:57:11Z,direct_url=<?>,disk_format='vmdk',id=947125f0-9664-40eb-953e-b1373b076c9f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3dc1e6434ef44fd0be01fd4ebf2b62ef',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-08-06T07:57:12Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 535.893950] env[61995]: DEBUG nova.virt.hardware [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Flavor limits 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 535.894075] env[61995]: DEBUG nova.virt.hardware [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Image limits 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 535.894253] env[61995]: DEBUG nova.virt.hardware [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Flavor pref 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 535.894437] env[61995]: DEBUG nova.virt.hardware [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Image pref 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 535.894523] env[61995]: DEBUG nova.virt.hardware [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Chose sockets=0,
cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 535.894720] env[61995]: DEBUG nova.virt.hardware [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 535.894865] env[61995]: DEBUG nova.virt.hardware [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61995) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 535.895016] env[61995]: DEBUG nova.virt.hardware [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Got 1 possible topologies {{(pid=61995) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 535.895163] env[61995]: DEBUG nova.virt.hardware [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 535.895433] env[61995]: DEBUG nova.virt.hardware [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 535.901838] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82e81c24-2a38-4fc1-a1b5-943fabf1d5f1 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.912527] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f25413b-5da9-4a7d-8278-711241614f41 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.927726] env[61995]: DEBUG oslo_concurrency.lockutils [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Releasing lock "refresh_cache-b610e51c-f7a4-4e3a-85c1-28603fc82bab" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 535.928166] env[61995]: DEBUG nova.compute.manager [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] Start destroying the instance on the hypervisor. 
{{(pid=61995) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 535.928349] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] Destroying instance {{(pid=61995) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 535.928824] env[61995]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-18776ebf-fff9-46c0-af02-33334304d546 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.937690] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb86283e-1797-46ce-8e22-7449593978b5 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.963426] env[61995]: WARNING nova.virt.vmwareapi.vmops [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b610e51c-f7a4-4e3a-85c1-28603fc82bab could not be found. [ 535.964444] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] Instance destroyed {{(pid=61995) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 535.964444] env[61995]: INFO nova.compute.manager [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] Took 0.04 seconds to destroy the instance on the hypervisor. [ 535.964444] env[61995]: DEBUG oslo.service.loopingcall [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61995) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 535.964444] env[61995]: DEBUG nova.compute.manager [-] [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] Deallocating network for instance {{(pid=61995) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2298}} [ 535.964444] env[61995]: DEBUG nova.network.neutron [-] [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] deallocate_for_instance() {{(pid=61995) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 536.117815] env[61995]: DEBUG oslo_concurrency.lockutils [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.800s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 536.118475] env[61995]: ERROR nova.compute.manager [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ef27cf19-df34-49d6-b558-2f4d11dee5a8, please check neutron logs for more information. [ 536.118475] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] Traceback (most recent call last): [ 536.118475] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] File "/opt/stack/nova/nova/compute/manager.py", line 2650, in _build_and_run_instance [ 536.118475] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] self.driver.spawn(context, instance, image_meta, [ 536.118475] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 536.118475] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 536.118475] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 536.118475] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] vm_ref = self.build_virtual_machine(instance, [ 536.118475] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 536.118475] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] vif_infos = vmwarevif.get_vif_info(self._session, [ 536.118475] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 536.118844] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] for vif in network_info: [ 536.118844] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 536.118844] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] return 
self._sync_wrapper(fn, *args, **kwargs) [ 536.118844] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 536.118844] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] self.wait() [ 536.118844] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 536.118844] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] self[:] = self._gt.wait() [ 536.118844] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 536.118844] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] return self._exit_event.wait() [ 536.118844] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 536.118844] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] result = hub.switch() [ 536.118844] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 536.118844] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] return self.greenlet.switch() [ 536.119192] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 536.119192] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] result = function(*args, **kwargs) [ 536.119192] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 536.119192] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] return func(*args, **kwargs) [ 536.119192] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] File "/opt/stack/nova/nova/compute/manager.py", line 2020, in _allocate_network_async [ 536.119192] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] raise e [ 536.119192] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] File "/opt/stack/nova/nova/compute/manager.py", line 1998, in _allocate_network_async [ 536.119192] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] nwinfo = self.network_api.allocate_for_instance( [ 536.119192] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 536.119192] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] created_port_ids = self._update_ports_for_instance( [ 536.119192] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 536.119192] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] with 
excutils.save_and_reraise_exception(): [ 536.119192] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 536.119584] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] self.force_reraise() [ 536.119584] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 536.119584] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] raise self.value [ 536.119584] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 536.119584] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] updated_port = self._update_port( [ 536.119584] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 536.119584] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] _ensure_no_port_binding_failure(port) [ 536.119584] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 536.119584] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] raise exception.PortBindingFailed(port_id=port['id']) [ 536.119584] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] nova.exception.PortBindingFailed: Binding failed for port ef27cf19-df34-49d6-b558-2f4d11dee5a8, please check neutron logs for more information. [ 536.119584] env[61995]: ERROR nova.compute.manager [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] [ 536.119906] env[61995]: DEBUG nova.compute.utils [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] Binding failed for port ef27cf19-df34-49d6-b558-2f4d11dee5a8, please check neutron logs for more information. 
{{(pid=61995) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 536.120549] env[61995]: DEBUG oslo_concurrency.lockutils [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 1.717s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 536.121409] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Expecting reply to msg 7a5318373cd64728aada46ac7f85778a in queue reply_757213bc08bb49dab178826d88b76f40 [ 536.129596] env[61995]: DEBUG nova.compute.manager [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] Build of instance cd3d1cbb-be77-4fd0-9666-0d544a19a16d was re-scheduled: Binding failed for port ef27cf19-df34-49d6-b558-2f4d11dee5a8, please check neutron logs for more information. {{(pid=61995) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2489}} [ 536.130451] env[61995]: DEBUG nova.compute.manager [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] Unplugging VIFs for instance {{(pid=61995) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3015}} [ 536.130775] env[61995]: DEBUG oslo_concurrency.lockutils [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Acquiring lock "refresh_cache-cd3d1cbb-be77-4fd0-9666-0d544a19a16d" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 536.130915] env[61995]: DEBUG oslo_concurrency.lockutils [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Acquired lock "refresh_cache-cd3d1cbb-be77-4fd0-9666-0d544a19a16d" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 536.131034] env[61995]: DEBUG nova.network.neutron [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] Building network info cache for instance {{(pid=61995) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 536.131480] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Expecting reply to msg d08d41a021db4e87a9af21f41bf873eb in queue reply_757213bc08bb49dab178826d88b76f40 [ 536.133826] env[61995]: DEBUG nova.network.neutron [-] [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] Instance cache missing network info. 
{{(pid=61995) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 536.134272] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 44b6d66df0014ee9aa5bdd5e69986912 in queue reply_757213bc08bb49dab178826d88b76f40 [ 536.138440] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d08d41a021db4e87a9af21f41bf873eb [ 536.168211] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7a5318373cd64728aada46ac7f85778a [ 536.179194] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 44b6d66df0014ee9aa5bdd5e69986912 [ 536.218025] env[61995]: DEBUG nova.network.neutron [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] Successfully created port: 58958f67-8153-4158-b726-bdfe68b1e491 {{(pid=61995) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 536.627199] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Expecting reply to msg 307e9ffba5e64d398071478d407aa6dc in queue reply_757213bc08bb49dab178826d88b76f40 [ 536.635872] env[61995]: DEBUG nova.network.neutron [-] [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] Updating instance_info_cache with network_info: [] {{(pid=61995) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 536.637104] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 0fe2897bae194dffb263bc1fdc12a199 in queue reply_757213bc08bb49dab178826d88b76f40 [ 536.638927] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 307e9ffba5e64d398071478d407aa6dc [ 536.648864] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0fe2897bae194dffb263bc1fdc12a199 [ 536.665098] env[61995]: DEBUG nova.network.neutron [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] Instance cache missing network info. {{(pid=61995) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 536.712384] env[61995]: ERROR nova.compute.manager [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 44c93bf1-8e65-4c5c-bbf7-fa9b74625351, please check neutron logs for more information. 
[ 536.712384] env[61995]: ERROR nova.compute.manager Traceback (most recent call last): [ 536.712384] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1998, in _allocate_network_async [ 536.712384] env[61995]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 536.712384] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 536.712384] env[61995]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 536.712384] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 536.712384] env[61995]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 536.712384] env[61995]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 536.712384] env[61995]: ERROR nova.compute.manager self.force_reraise() [ 536.712384] env[61995]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 536.712384] env[61995]: ERROR nova.compute.manager raise self.value [ 536.712384] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 536.712384] env[61995]: ERROR nova.compute.manager updated_port = self._update_port( [ 536.712384] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 536.712384] env[61995]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 536.713170] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 536.713170] env[61995]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 536.713170] env[61995]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 44c93bf1-8e65-4c5c-bbf7-fa9b74625351, please check neutron logs for more information. 
[ 536.713170] env[61995]: ERROR nova.compute.manager [ 536.713170] env[61995]: Traceback (most recent call last): [ 536.713170] env[61995]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 536.713170] env[61995]: listener.cb(fileno) [ 536.713170] env[61995]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 536.713170] env[61995]: result = function(*args, **kwargs) [ 536.713170] env[61995]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 536.713170] env[61995]: return func(*args, **kwargs) [ 536.713170] env[61995]: File "/opt/stack/nova/nova/compute/manager.py", line 2020, in _allocate_network_async [ 536.713170] env[61995]: raise e [ 536.713170] env[61995]: File "/opt/stack/nova/nova/compute/manager.py", line 1998, in _allocate_network_async [ 536.713170] env[61995]: nwinfo = self.network_api.allocate_for_instance( [ 536.713170] env[61995]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 536.713170] env[61995]: created_port_ids = self._update_ports_for_instance( [ 536.713170] env[61995]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 536.713170] env[61995]: with excutils.save_and_reraise_exception(): [ 536.713170] env[61995]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 536.713170] env[61995]: self.force_reraise() [ 536.713170] env[61995]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 536.713170] env[61995]: raise self.value [ 536.713170] env[61995]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 536.713170] env[61995]: updated_port = self._update_port( [ 536.713170] env[61995]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 536.713170] env[61995]: _ensure_no_port_binding_failure(port) [ 536.713170] env[61995]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 536.713170] env[61995]: raise exception.PortBindingFailed(port_id=port['id']) [ 536.714466] env[61995]: nova.exception.PortBindingFailed: Binding failed for port 44c93bf1-8e65-4c5c-bbf7-fa9b74625351, please check neutron logs for more information. [ 536.714466] env[61995]: Removing descriptor: 17 [ 536.714466] env[61995]: ERROR nova.compute.manager [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 44c93bf1-8e65-4c5c-bbf7-fa9b74625351, please check neutron logs for more information. 
[ 536.714466] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] Traceback (most recent call last): [ 536.714466] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] File "/opt/stack/nova/nova/compute/manager.py", line 2903, in _build_resources [ 536.714466] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] yield resources [ 536.714466] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] File "/opt/stack/nova/nova/compute/manager.py", line 2650, in _build_and_run_instance [ 536.714466] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] self.driver.spawn(context, instance, image_meta, [ 536.714466] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 536.714466] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] self._vmops.spawn(context, instance, image_meta, injected_files, [ 536.714466] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 536.714466] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] vm_ref = self.build_virtual_machine(instance, [ 536.715063] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 536.715063] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] vif_infos = vmwarevif.get_vif_info(self._session, [ 536.715063] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 536.715063] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] for vif in network_info: [ 536.715063] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 536.715063] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] return self._sync_wrapper(fn, *args, **kwargs) [ 536.715063] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 536.715063] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] self.wait() [ 536.715063] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 536.715063] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] self[:] = self._gt.wait() [ 536.715063] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 536.715063] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] return self._exit_event.wait() [ 536.715063] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 536.715713] env[61995]: ERROR 
nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] result = hub.switch() [ 536.715713] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 536.715713] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] return self.greenlet.switch() [ 536.715713] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 536.715713] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] result = function(*args, **kwargs) [ 536.715713] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 536.715713] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] return func(*args, **kwargs) [ 536.715713] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] File "/opt/stack/nova/nova/compute/manager.py", line 2020, in _allocate_network_async [ 536.715713] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] raise e [ 536.715713] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] File "/opt/stack/nova/nova/compute/manager.py", line 1998, in _allocate_network_async [ 536.715713] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] nwinfo = self.network_api.allocate_for_instance( [ 536.715713] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 536.715713] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] created_port_ids = self._update_ports_for_instance( [ 536.716387] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 536.716387] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] with excutils.save_and_reraise_exception(): [ 536.716387] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 536.716387] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] self.force_reraise() [ 536.716387] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 536.716387] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] raise self.value [ 536.716387] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 536.716387] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] updated_port = self._update_port( [ 536.716387] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 536.716387] 
env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] _ensure_no_port_binding_failure(port) [ 536.716387] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 536.716387] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] raise exception.PortBindingFailed(port_id=port['id']) [ 536.717066] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] nova.exception.PortBindingFailed: Binding failed for port 44c93bf1-8e65-4c5c-bbf7-fa9b74625351, please check neutron logs for more information. [ 536.717066] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] [ 536.717066] env[61995]: INFO nova.compute.manager [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] Terminating instance [ 536.717066] env[61995]: DEBUG oslo_concurrency.lockutils [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Acquiring lock "refresh_cache-2a07c11a-4e95-47db-bf50-5ad720403faa" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 536.717066] env[61995]: DEBUG oslo_concurrency.lockutils [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Acquired lock "refresh_cache-2a07c11a-4e95-47db-bf50-5ad720403faa" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 536.717066] env[61995]: DEBUG nova.network.neutron [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] Building network info cache for instance {{(pid=61995) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 536.717405] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Expecting reply to msg 225bebffa3fb46769d6546bc27d76a40 in queue reply_757213bc08bb49dab178826d88b76f40 [ 536.724260] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 225bebffa3fb46769d6546bc27d76a40 [ 536.809833] env[61995]: DEBUG nova.network.neutron [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] Updating instance_info_cache with network_info: [] {{(pid=61995) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 536.810966] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Expecting reply to msg 0e269ba12afd413ba8817a5238d0c94a in queue reply_757213bc08bb49dab178826d88b76f40 [ 536.827060] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
0e269ba12afd413ba8817a5238d0c94a [ 536.929719] env[61995]: DEBUG oslo_concurrency.lockutils [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] Acquiring lock "2dbe6731-bab0-4a47-91c7-a1d8b42dcec4" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 536.930000] env[61995]: DEBUG oslo_concurrency.lockutils [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] Lock "2dbe6731-bab0-4a47-91c7-a1d8b42dcec4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 536.930478] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] Expecting reply to msg 0aa542fec059434292649a6d8068b908 in queue reply_757213bc08bb49dab178826d88b76f40 [ 536.943533] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0aa542fec059434292649a6d8068b908 [ 537.139866] env[61995]: INFO nova.compute.manager [-] [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] Took 1.18 seconds to deallocate network for instance. [ 537.142194] env[61995]: DEBUG nova.compute.claims [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] Aborting claim: {{(pid=61995) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 537.142409] env[61995]: DEBUG oslo_concurrency.lockutils [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 537.148678] env[61995]: DEBUG nova.compute.resource_tracker [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Instance 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61995) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 537.148840] env[61995]: DEBUG nova.compute.resource_tracker [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Instance 5527576a-d56d-42c3-a7f7-02c66c0d1b3d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}.
{{(pid=61995) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 537.149409] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Expecting reply to msg ad3355ab0e4d4484835012a6186ce5f8 in queue reply_757213bc08bb49dab178826d88b76f40 [ 537.180859] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ad3355ab0e4d4484835012a6186ce5f8 [ 537.264588] env[61995]: DEBUG nova.network.neutron [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] Instance cache missing network info. {{(pid=61995) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 537.313152] env[61995]: DEBUG oslo_concurrency.lockutils [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Releasing lock "refresh_cache-cd3d1cbb-be77-4fd0-9666-0d544a19a16d" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 537.313396] env[61995]: DEBUG nova.compute.manager [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61995) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3038}} [ 537.313601] env[61995]: DEBUG nova.compute.manager [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] Deallocating network for instance {{(pid=61995) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2298}} [ 537.313769] env[61995]: DEBUG nova.network.neutron [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] deallocate_for_instance() {{(pid=61995) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 537.346769] env[61995]: DEBUG nova.network.neutron [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] Instance cache missing network info. 
{{(pid=61995) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 537.347364] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Expecting reply to msg e000d0759c2c42d397c4ea6591dc6a90 in queue reply_757213bc08bb49dab178826d88b76f40 [ 537.360115] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e000d0759c2c42d397c4ea6591dc6a90 [ 537.364160] env[61995]: DEBUG nova.network.neutron [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] Updating instance_info_cache with network_info: [] {{(pid=61995) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 537.364644] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Expecting reply to msg 3796772193114dcbac11daf56e94e905 in queue reply_757213bc08bb49dab178826d88b76f40 [ 537.372792] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3796772193114dcbac11daf56e94e905 [ 537.432915] env[61995]: DEBUG nova.compute.manager [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] Starting instance... {{(pid=61995) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 537.434790] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] Expecting reply to msg ee5a14460ec744128dc25ef3f72f33d0 in queue reply_757213bc08bb49dab178826d88b76f40 [ 537.477542] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ee5a14460ec744128dc25ef3f72f33d0 [ 537.652313] env[61995]: DEBUG nova.compute.resource_tracker [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Instance cd3d1cbb-be77-4fd0-9666-0d544a19a16d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61995) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 537.652481] env[61995]: DEBUG nova.compute.resource_tracker [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Instance 4c0dd905-f751-4cb4-9be1-ef06518990f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61995) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 537.652599] env[61995]: DEBUG nova.compute.resource_tracker [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Instance b610e51c-f7a4-4e3a-85c1-28603fc82bab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61995) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 537.652710] env[61995]: DEBUG nova.compute.resource_tracker [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Instance 2a07c11a-4e95-47db-bf50-5ad720403faa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61995) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 537.652818] env[61995]: DEBUG nova.compute.resource_tracker [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Instance cfffe8e0-4074-41c9-8e1b-49d621fc3c1b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61995) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 537.653385] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Expecting reply to msg 0d12437bfb2f4c1eaceb036571856fa9 in queue reply_757213bc08bb49dab178826d88b76f40 [ 537.663795] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0d12437bfb2f4c1eaceb036571856fa9 [ 537.838227] env[61995]: ERROR nova.compute.manager [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 58958f67-8153-4158-b726-bdfe68b1e491, please check neutron logs for more information. [ 537.838227] env[61995]: ERROR nova.compute.manager Traceback (most recent call last): [ 537.838227] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1998, in _allocate_network_async [ 537.838227] env[61995]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 537.838227] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 537.838227] env[61995]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 537.838227] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 537.838227] env[61995]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 537.838227] env[61995]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 537.838227] env[61995]: ERROR nova.compute.manager self.force_reraise() [ 537.838227] env[61995]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 537.838227] env[61995]: ERROR nova.compute.manager raise self.value [ 537.838227] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 537.838227] env[61995]: ERROR nova.compute.manager updated_port = self._update_port( [ 537.838227] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 537.838227] env[61995]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 537.838655] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 537.838655] 
env[61995]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 537.838655] env[61995]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 58958f67-8153-4158-b726-bdfe68b1e491, please check neutron logs for more information. [ 537.838655] env[61995]: ERROR nova.compute.manager [ 537.838655] env[61995]: Traceback (most recent call last): [ 537.838655] env[61995]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 537.838655] env[61995]: listener.cb(fileno) [ 537.838655] env[61995]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 537.838655] env[61995]: result = function(*args, **kwargs) [ 537.838655] env[61995]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 537.838655] env[61995]: return func(*args, **kwargs) [ 537.838655] env[61995]: File "/opt/stack/nova/nova/compute/manager.py", line 2020, in _allocate_network_async [ 537.838655] env[61995]: raise e [ 537.838655] env[61995]: File "/opt/stack/nova/nova/compute/manager.py", line 1998, in _allocate_network_async [ 537.838655] env[61995]: nwinfo = self.network_api.allocate_for_instance( [ 537.838655] env[61995]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 537.838655] env[61995]: created_port_ids = self._update_ports_for_instance( [ 537.838655] env[61995]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 537.838655] env[61995]: with excutils.save_and_reraise_exception(): [ 537.838655] env[61995]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 537.838655] env[61995]: self.force_reraise() [ 537.838655] env[61995]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 537.838655] env[61995]: raise self.value [ 537.838655] env[61995]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 537.838655] env[61995]: updated_port = self._update_port( [ 537.838655] env[61995]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 537.838655] env[61995]: _ensure_no_port_binding_failure(port) [ 537.838655] env[61995]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 537.838655] env[61995]: raise exception.PortBindingFailed(port_id=port['id']) [ 537.839382] env[61995]: nova.exception.PortBindingFailed: Binding failed for port 58958f67-8153-4158-b726-bdfe68b1e491, please check neutron logs for more information. [ 537.839382] env[61995]: Removing descriptor: 15 [ 537.839382] env[61995]: ERROR nova.compute.manager [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 58958f67-8153-4158-b726-bdfe68b1e491, please check neutron logs for more information. 
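The traceback above terminates in _ensure_no_port_binding_failure() at nova/network/neutron.py:294. For readers of this log, the following is a minimal, self-contained Python sketch of that kind of check; it is illustrative only, the exception class is a stand-in for nova.exception.PortBindingFailed, and the port dict is assumed to carry the standard Neutron 'binding:vif_type' field.

# Illustrative sketch only; not verbatim Nova source. Neutron marks a port it
# could not bind with binding:vif_type = 'binding_failed', and the compute
# manager turns that into the PortBindingFailed error seen in this traceback.
VIF_TYPE_BINDING_FAILED = 'binding_failed'


class PortBindingFailed(Exception):
    # Stand-in for nova.exception.PortBindingFailed; message mirrors the log text.
    def __init__(self, port_id):
        super().__init__('Binding failed for port %s, please check neutron '
                         'logs for more information.' % port_id)


def ensure_no_port_binding_failure(port):
    # 'port' is a Neutron port dict as returned by the networking API.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])


# Example: a port whose binding failed, as with 58958f67-8153-4158-b726-bdfe68b1e491 above.
# ensure_no_port_binding_failure({'id': '58958f67-8153-4158-b726-bdfe68b1e491',
#                                 'binding:vif_type': 'binding_failed'})  # raises PortBindingFailed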
[ 537.839382] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] Traceback (most recent call last): [ 537.839382] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] File "/opt/stack/nova/nova/compute/manager.py", line 2903, in _build_resources [ 537.839382] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] yield resources [ 537.839382] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] File "/opt/stack/nova/nova/compute/manager.py", line 2650, in _build_and_run_instance [ 537.839382] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] self.driver.spawn(context, instance, image_meta, [ 537.839382] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 537.839382] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 537.839382] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 537.839382] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] vm_ref = self.build_virtual_machine(instance, [ 537.839713] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 537.839713] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] vif_infos = vmwarevif.get_vif_info(self._session, [ 537.839713] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 537.839713] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] for vif in network_info: [ 537.839713] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 537.839713] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] return self._sync_wrapper(fn, *args, **kwargs) [ 537.839713] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 537.839713] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] self.wait() [ 537.839713] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 537.839713] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] self[:] = self._gt.wait() [ 537.839713] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 537.839713] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] return self._exit_event.wait() [ 537.839713] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 537.840081] env[61995]: ERROR 
nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] result = hub.switch() [ 537.840081] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 537.840081] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] return self.greenlet.switch() [ 537.840081] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 537.840081] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] result = function(*args, **kwargs) [ 537.840081] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 537.840081] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] return func(*args, **kwargs) [ 537.840081] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] File "/opt/stack/nova/nova/compute/manager.py", line 2020, in _allocate_network_async [ 537.840081] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] raise e [ 537.840081] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] File "/opt/stack/nova/nova/compute/manager.py", line 1998, in _allocate_network_async [ 537.840081] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] nwinfo = self.network_api.allocate_for_instance( [ 537.840081] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 537.840081] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] created_port_ids = self._update_ports_for_instance( [ 537.840399] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 537.840399] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] with excutils.save_and_reraise_exception(): [ 537.840399] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 537.840399] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] self.force_reraise() [ 537.840399] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 537.840399] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] raise self.value [ 537.840399] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 537.840399] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] updated_port = self._update_port( [ 537.840399] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 537.840399] 
env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] _ensure_no_port_binding_failure(port) [ 537.840399] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 537.840399] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] raise exception.PortBindingFailed(port_id=port['id']) [ 537.840682] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] nova.exception.PortBindingFailed: Binding failed for port 58958f67-8153-4158-b726-bdfe68b1e491, please check neutron logs for more information. [ 537.840682] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] [ 537.840682] env[61995]: INFO nova.compute.manager [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] Terminating instance [ 537.841909] env[61995]: DEBUG oslo_concurrency.lockutils [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Acquiring lock "refresh_cache-cfffe8e0-4074-41c9-8e1b-49d621fc3c1b" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 537.842117] env[61995]: DEBUG oslo_concurrency.lockutils [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Acquired lock "refresh_cache-cfffe8e0-4074-41c9-8e1b-49d621fc3c1b" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 537.842343] env[61995]: DEBUG nova.network.neutron [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] Building network info cache for instance {{(pid=61995) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 537.842767] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Expecting reply to msg 96e621631aee4dd1a5a189fd7b611ed0 in queue reply_757213bc08bb49dab178826d88b76f40 [ 537.851215] env[61995]: DEBUG nova.network.neutron [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] Updating instance_info_cache with network_info: [] {{(pid=61995) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 537.852297] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Expecting reply to msg 97bf4a0bb28a48b8a850aafb2cace4db in queue reply_757213bc08bb49dab178826d88b76f40 [ 537.852663] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 96e621631aee4dd1a5a189fd7b611ed0 [ 537.864128] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response 
for msg 97bf4a0bb28a48b8a850aafb2cace4db [ 537.867325] env[61995]: DEBUG oslo_concurrency.lockutils [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Releasing lock "refresh_cache-2a07c11a-4e95-47db-bf50-5ad720403faa" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 537.867653] env[61995]: DEBUG nova.compute.manager [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] Start destroying the instance on the hypervisor. {{(pid=61995) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 537.867961] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] Destroying instance {{(pid=61995) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 537.868403] env[61995]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-19cb33ba-daf9-44cd-ab1f-a7ad7c3877d0 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.877749] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32867a51-e20e-4d80-a193-fff8727bb478 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.904544] env[61995]: WARNING nova.virt.vmwareapi.vmops [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2a07c11a-4e95-47db-bf50-5ad720403faa could not be found. [ 537.904782] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] Instance destroyed {{(pid=61995) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 537.904953] env[61995]: INFO nova.compute.manager [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] Took 0.04 seconds to destroy the instance on the hypervisor. [ 537.905194] env[61995]: DEBUG oslo.service.loopingcall [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61995) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 537.905389] env[61995]: DEBUG nova.compute.manager [-] [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] Deallocating network for instance {{(pid=61995) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2298}} [ 537.905546] env[61995]: DEBUG nova.network.neutron [-] [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] deallocate_for_instance() {{(pid=61995) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 537.940457] env[61995]: DEBUG nova.network.neutron [-] [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] Instance cache missing network info. {{(pid=61995) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 537.941042] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg dd5475f139b346dbb62c693e267b3322 in queue reply_757213bc08bb49dab178826d88b76f40 [ 537.953665] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dd5475f139b346dbb62c693e267b3322 [ 537.971270] env[61995]: DEBUG oslo_concurrency.lockutils [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 538.160258] env[61995]: DEBUG nova.compute.resource_tracker [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Instance b819920a-cc74-4718-a054-e81affabcd5b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61995) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 538.160258] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Expecting reply to msg 50c36487d8a4433ca3bf848fef6de65b in queue reply_757213bc08bb49dab178826d88b76f40 [ 538.180337] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 50c36487d8a4433ca3bf848fef6de65b [ 538.205286] env[61995]: DEBUG nova.compute.manager [req-c1d7c08d-07bd-48f9-8133-575f4c703ec0 req-df8b9954-a4c2-4d49-8ad6-1f17dc263a08 service nova] [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] Received event network-changed-a37484e9-c5c2-4756-a547-b348cb68b24b {{(pid=61995) external_instance_event /opt/stack/nova/nova/compute/manager.py:11144}} [ 538.205518] env[61995]: DEBUG nova.compute.manager [req-c1d7c08d-07bd-48f9-8133-575f4c703ec0 req-df8b9954-a4c2-4d49-8ad6-1f17dc263a08 service nova] [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] Refreshing instance network info cache due to event network-changed-a37484e9-c5c2-4756-a547-b348cb68b24b. 
{{(pid=61995) external_instance_event /opt/stack/nova/nova/compute/manager.py:11149}} [ 538.205686] env[61995]: DEBUG oslo_concurrency.lockutils [req-c1d7c08d-07bd-48f9-8133-575f4c703ec0 req-df8b9954-a4c2-4d49-8ad6-1f17dc263a08 service nova] Acquiring lock "refresh_cache-b610e51c-f7a4-4e3a-85c1-28603fc82bab" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 538.206024] env[61995]: DEBUG oslo_concurrency.lockutils [req-c1d7c08d-07bd-48f9-8133-575f4c703ec0 req-df8b9954-a4c2-4d49-8ad6-1f17dc263a08 service nova] Acquired lock "refresh_cache-b610e51c-f7a4-4e3a-85c1-28603fc82bab" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 538.206273] env[61995]: DEBUG nova.network.neutron [req-c1d7c08d-07bd-48f9-8133-575f4c703ec0 req-df8b9954-a4c2-4d49-8ad6-1f17dc263a08 service nova] [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] Refreshing network info cache for port a37484e9-c5c2-4756-a547-b348cb68b24b {{(pid=61995) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 538.206727] env[61995]: INFO oslo_messaging._drivers.amqpdriver [req-c1d7c08d-07bd-48f9-8133-575f4c703ec0 req-df8b9954-a4c2-4d49-8ad6-1f17dc263a08 service nova] Expecting reply to msg 890ff891a8c14c0db3b489697337af8c in queue reply_757213bc08bb49dab178826d88b76f40 [ 538.214517] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 890ff891a8c14c0db3b489697337af8c [ 538.265743] env[61995]: DEBUG nova.compute.manager [None req-0dc1772d-a51f-475a-9ca5-0993724635e0 tempest-ServerDiagnosticsV248Test-1964762263 tempest-ServerDiagnosticsV248Test-1964762263-project-admin] [instance: 5527576a-d56d-42c3-a7f7-02c66c0d1b3d] Checking state {{(pid=61995) _get_power_state /opt/stack/nova/nova/compute/manager.py:1800}} [ 538.266957] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5cd8386-8f89-4c8b-8439-71210e021443 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.273953] env[61995]: INFO nova.compute.manager [None req-0dc1772d-a51f-475a-9ca5-0993724635e0 tempest-ServerDiagnosticsV248Test-1964762263 tempest-ServerDiagnosticsV248Test-1964762263-project-admin] [instance: 5527576a-d56d-42c3-a7f7-02c66c0d1b3d] Retrieving diagnostics [ 538.275026] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e59b3a12-ca26-457a-a954-100fd9ab911a {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.354078] env[61995]: INFO nova.compute.manager [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] [instance: cd3d1cbb-be77-4fd0-9666-0d544a19a16d] Took 1.04 seconds to deallocate network for instance. 
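Several requests interleave in the entries above (the destroy of 2a07c11a-4e95-47db-bf50-5ad720403faa, the network deallocation for cd3d1cbb-be77-4fd0-9666-0d544a19a16d, and an external network-changed event for b610e51c-f7a4-4e3a-85c1-28603fc82bab). A small helper like the following, written for this log capture and not part of Nova, regroups entries by their req-<uuid> context so one operation can be read end to end; the entry prefix format is inferred from the lines above.

import re
from collections import defaultdict

# Entries in this capture start with a prefix like "[ 538.205286] env[61995]:".
# Several entries may be concatenated on one physical line, so split on that
# prefix (zero-width split; needs Python 3.7+).
ENTRY_START = re.compile(r'(?=\[\s*\d+\.\d+\]\s+env\[\d+\]:)')
# Request IDs look like "req-71c2fabe-40c2-4828-87e6-8ede90ec8b78"; when two
# appear (request-id plus global-request-id), only the first is used here.
REQUEST_ID = re.compile(r'\breq-[0-9a-f]{8}(?:-[0-9a-f]{4}){3}-[0-9a-f]{12}\b')


def group_by_request(text):
    """Bucket log entries by request ID (None for entries without one)."""
    grouped = defaultdict(list)
    for entry in ENTRY_START.split(text):
        entry = entry.strip()
        if not entry:
            continue
        match = REQUEST_ID.search(entry)
        grouped[match.group(0) if match else None].append(entry)
    return grouped

For example, group_by_request(text)['req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca'] collects, in order, the deallocation entries for instance cd3d1cbb-be77-4fd0-9666-0d544a19a16d seen above.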
[ 538.355916] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Expecting reply to msg ff0603bcb165446a9bcadba927ab65f6 in queue reply_757213bc08bb49dab178826d88b76f40 [ 538.368340] env[61995]: DEBUG nova.network.neutron [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] Instance cache missing network info. {{(pid=61995) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 538.417219] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ff0603bcb165446a9bcadba927ab65f6 [ 538.447915] env[61995]: DEBUG nova.network.neutron [-] [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] Updating instance_info_cache with network_info: [] {{(pid=61995) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 538.448462] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg a5533095eae44acfa975dc1a1b59c31a in queue reply_757213bc08bb49dab178826d88b76f40 [ 538.475839] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a5533095eae44acfa975dc1a1b59c31a [ 538.488701] env[61995]: DEBUG oslo_concurrency.lockutils [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] Acquiring lock "6fffc743-caaa-4356-9406-6bdc6321aa1f" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 538.488921] env[61995]: DEBUG oslo_concurrency.lockutils [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] Lock "6fffc743-caaa-4356-9406-6bdc6321aa1f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 538.489640] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] Expecting reply to msg 0c9fea255a3648d09d841a183c390c7d in queue reply_757213bc08bb49dab178826d88b76f40 [ 538.502282] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Expecting reply to msg 864424bdf70840a2ac0eb7bf3882405d in queue reply_757213bc08bb49dab178826d88b76f40 [ 538.511420] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c9fea255a3648d09d841a183c390c7d [ 538.513311] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 864424bdf70840a2ac0eb7bf3882405d [ 538.634471] env[61995]: DEBUG nova.network.neutron [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] Updating
instance_info_cache with network_info: [] {{(pid=61995) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 538.634668] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Expecting reply to msg 47a8de9a5d374b4c90cefc61a3142781 in queue reply_757213bc08bb49dab178826d88b76f40 [ 538.646571] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 47a8de9a5d374b4c90cefc61a3142781 [ 538.660556] env[61995]: DEBUG nova.compute.resource_tracker [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Instance 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61995) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 538.660823] env[61995]: DEBUG nova.compute.resource_tracker [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=61995) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 538.661029] env[61995]: DEBUG nova.compute.resource_tracker [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=61995) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 538.775527] env[61995]: DEBUG nova.network.neutron [req-c1d7c08d-07bd-48f9-8133-575f4c703ec0 req-df8b9954-a4c2-4d49-8ad6-1f17dc263a08 service nova] [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] Instance cache missing network info. 
{{(pid=61995) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 538.829049] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-979f3c1d-79d5-4d75-b960-ecc679f870ec {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.837123] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-636e01a7-0261-4b11-ae8f-a965199cbf88 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.874337] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Expecting reply to msg ef00b63984524b20ac292b5d4d656b78 in queue reply_757213bc08bb49dab178826d88b76f40 [ 538.880109] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e09d9f4-4a89-4ee9-b327-8a697a63eef8 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.888715] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0693c321-dfb2-4ca4-9054-d7734331751b {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.893443] env[61995]: DEBUG nova.network.neutron [req-c1d7c08d-07bd-48f9-8133-575f4c703ec0 req-df8b9954-a4c2-4d49-8ad6-1f17dc263a08 service nova] [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] Updating instance_info_cache with network_info: [] {{(pid=61995) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 538.893958] env[61995]: INFO oslo_messaging._drivers.amqpdriver [req-c1d7c08d-07bd-48f9-8133-575f4c703ec0 req-df8b9954-a4c2-4d49-8ad6-1f17dc263a08 service nova] Expecting reply to msg 3b6edabe860740b399701c584a7c6874 in queue reply_757213bc08bb49dab178826d88b76f40 [ 538.906680] env[61995]: DEBUG nova.compute.provider_tree [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Inventory has not changed in ProviderTree for provider: c68e615f-c0c6-4278-bb7b-a579e68878a7 {{(pid=61995) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 538.907109] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Expecting reply to msg a80a94b6821243b98d6cc6d2eb7ad471 in queue reply_757213bc08bb49dab178826d88b76f40 [ 538.908815] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3b6edabe860740b399701c584a7c6874 [ 538.916915] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a80a94b6821243b98d6cc6d2eb7ad471 [ 538.924322] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ef00b63984524b20ac292b5d4d656b78 [ 538.950922] env[61995]: INFO nova.compute.manager [-] [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] Took 1.05 seconds to deallocate network for instance. 
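As a quick cross-check of the resource tracker figures just reported (total allocated vcpus: 6, used_ram=1664MB, used_disk=6GB, used_vcpus=6): assuming each of the six tracked instances uses the flavor shown in the heal-allocation entries above ({'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}) and that reserved host memory is Nova's default 512 MB (consistent with the MEMORY_MB 'reserved': 512 in the inventory data reported further down in this log), the numbers reconcile as follows.

# Cross-check of the "Final resource view" above; the 512 MB reserved RAM is an
# assumption (Nova's default reserved_host_memory_mb), not taken from this log line.
instances = 6
flavor = {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}
reserved_ram_mb = 512

used_ram_mb = reserved_ram_mb + instances * flavor['MEMORY_MB']  # 512 + 6 * 192 = 1664
used_disk_gb = instances * flavor['DISK_GB']                     # 6
used_vcpus = instances * flavor['VCPU']                          # 6

assert (used_ram_mb, used_disk_gb, used_vcpus) == (1664, 6, 6)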
[ 538.953865] env[61995]: DEBUG nova.compute.claims [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] Aborting claim: {{(pid=61995) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 538.953865] env[61995]: DEBUG oslo_concurrency.lockutils [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 538.996644] env[61995]: DEBUG nova.compute.manager [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] Starting instance... {{(pid=61995) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 538.996644] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] Expecting reply to msg bc301319d8fc4e5fb6eedfe93c7b0e83 in queue reply_757213bc08bb49dab178826d88b76f40 [ 539.006967] env[61995]: INFO nova.compute.manager [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Rebuilding instance [ 539.038574] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bc301319d8fc4e5fb6eedfe93c7b0e83 [ 539.061648] env[61995]: DEBUG nova.compute.manager [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Checking state {{(pid=61995) _get_power_state /opt/stack/nova/nova/compute/manager.py:1800}} [ 539.062597] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-726d7252-dedf-486a-9d3f-8050bc0917b2 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.071779] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Expecting reply to msg 503a8c0253f447bfb5f9ce65331faa48 in queue reply_757213bc08bb49dab178826d88b76f40 [ 539.126138] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 503a8c0253f447bfb5f9ce65331faa48 [ 539.137324] env[61995]: DEBUG oslo_concurrency.lockutils [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Releasing lock "refresh_cache-cfffe8e0-4074-41c9-8e1b-49d621fc3c1b" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 539.137721] env[61995]: DEBUG nova.compute.manager [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] Start destroying the instance on the 
hypervisor. {{(pid=61995) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 539.137974] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] Destroying instance {{(pid=61995) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 539.138286] env[61995]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-64c7f793-586c-4f88-bd9b-f0a35ab0aa2f {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.147252] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96b7cb6a-ca9d-440d-b052-5bc15af60397 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.170192] env[61995]: WARNING nova.virt.vmwareapi.vmops [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance cfffe8e0-4074-41c9-8e1b-49d621fc3c1b could not be found. [ 539.170432] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] Instance destroyed {{(pid=61995) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 539.170603] env[61995]: INFO nova.compute.manager [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] Took 0.03 seconds to destroy the instance on the hypervisor. [ 539.170852] env[61995]: DEBUG oslo.service.loopingcall [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61995) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 539.171070] env[61995]: DEBUG nova.compute.manager [-] [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] Deallocating network for instance {{(pid=61995) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2298}} [ 539.171159] env[61995]: DEBUG nova.network.neutron [-] [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] deallocate_for_instance() {{(pid=61995) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 539.213040] env[61995]: DEBUG nova.network.neutron [-] [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] Instance cache missing network info.
{{(pid=61995) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 539.213582] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg f8086a0edd184293aa1ddc982f557f58 in queue reply_757213bc08bb49dab178826d88b76f40 [ 539.225167] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f8086a0edd184293aa1ddc982f557f58 [ 539.396481] env[61995]: DEBUG oslo_concurrency.lockutils [req-c1d7c08d-07bd-48f9-8133-575f4c703ec0 req-df8b9954-a4c2-4d49-8ad6-1f17dc263a08 service nova] Releasing lock "refresh_cache-b610e51c-f7a4-4e3a-85c1-28603fc82bab" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 539.399059] env[61995]: INFO nova.scheduler.client.report [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Deleted allocations for instance cd3d1cbb-be77-4fd0-9666-0d544a19a16d [ 539.405227] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Expecting reply to msg 508689543b6e4172b96fdbc16055a512 in queue reply_757213bc08bb49dab178826d88b76f40 [ 539.410055] env[61995]: DEBUG nova.scheduler.client.report [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Inventory has not changed for provider c68e615f-c0c6-4278-bb7b-a579e68878a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61995) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 539.414277] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Expecting reply to msg 3f57ba52bd134ec8bffd83aef67acad7 in queue reply_757213bc08bb49dab178826d88b76f40 [ 539.442248] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 508689543b6e4172b96fdbc16055a512 [ 539.478021] env[61995]: DEBUG nova.compute.manager [req-ff3b88e4-0198-40ce-abab-5c2dddf0a5cb req-192ddb7e-18ae-4994-a96b-f1904880b0c8 service nova] [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] Received event network-vif-deleted-b996cfab-151e-438f-929d-3393ad4e64f7 {{(pid=61995) external_instance_event /opt/stack/nova/nova/compute/manager.py:11144}} [ 539.478258] env[61995]: DEBUG nova.compute.manager [req-ff3b88e4-0198-40ce-abab-5c2dddf0a5cb req-192ddb7e-18ae-4994-a96b-f1904880b0c8 service nova] [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] Received event network-changed-44c93bf1-8e65-4c5c-bbf7-fa9b74625351 {{(pid=61995) external_instance_event /opt/stack/nova/nova/compute/manager.py:11144}} [ 539.478447] env[61995]: DEBUG nova.compute.manager [req-ff3b88e4-0198-40ce-abab-5c2dddf0a5cb req-192ddb7e-18ae-4994-a96b-f1904880b0c8 service nova] [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] Refreshing instance network info cache due to event network-changed-44c93bf1-8e65-4c5c-bbf7-fa9b74625351. 
{{(pid=61995) external_instance_event /opt/stack/nova/nova/compute/manager.py:11149}} [ 539.478661] env[61995]: DEBUG oslo_concurrency.lockutils [req-ff3b88e4-0198-40ce-abab-5c2dddf0a5cb req-192ddb7e-18ae-4994-a96b-f1904880b0c8 service nova] Acquiring lock "refresh_cache-2a07c11a-4e95-47db-bf50-5ad720403faa" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 539.478823] env[61995]: DEBUG oslo_concurrency.lockutils [req-ff3b88e4-0198-40ce-abab-5c2dddf0a5cb req-192ddb7e-18ae-4994-a96b-f1904880b0c8 service nova] Acquired lock "refresh_cache-2a07c11a-4e95-47db-bf50-5ad720403faa" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 539.479084] env[61995]: DEBUG nova.network.neutron [req-ff3b88e4-0198-40ce-abab-5c2dddf0a5cb req-192ddb7e-18ae-4994-a96b-f1904880b0c8 service nova] [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] Refreshing network info cache for port 44c93bf1-8e65-4c5c-bbf7-fa9b74625351 {{(pid=61995) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 539.479376] env[61995]: INFO oslo_messaging._drivers.amqpdriver [req-ff3b88e4-0198-40ce-abab-5c2dddf0a5cb req-192ddb7e-18ae-4994-a96b-f1904880b0c8 service nova] Expecting reply to msg 04185f5b4c7d42e5a609014bd17fb3db in queue reply_757213bc08bb49dab178826d88b76f40 [ 539.492632] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 04185f5b4c7d42e5a609014bd17fb3db [ 539.500610] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3f57ba52bd134ec8bffd83aef67acad7 [ 539.527073] env[61995]: DEBUG oslo_concurrency.lockutils [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 539.577337] env[61995]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Powering off the VM {{(pid=61995) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 539.577337] env[61995]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b93d6c84-0b43-4c54-8245-7f197aea9564 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.584976] env[61995]: DEBUG oslo_vmware.api [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Waiting for the task: (returnval){ [ 539.584976] env[61995]: value = "task-378080" [ 539.584976] env[61995]: _type = "Task" [ 539.584976] env[61995]: } to complete. {{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 539.594929] env[61995]: DEBUG oslo_vmware.api [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': task-378080, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 539.716137] env[61995]: DEBUG nova.network.neutron [-] [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] Updating instance_info_cache with network_info: [] {{(pid=61995) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 539.716658] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 2753393a09bb486d81514dc13d78211c in queue reply_757213bc08bb49dab178826d88b76f40 [ 539.726024] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2753393a09bb486d81514dc13d78211c [ 539.920425] env[61995]: DEBUG oslo_concurrency.lockutils [None req-e7c4bbb5-54e6-4867-acb4-2fd4f1abacca tempest-FloatingIPsAssociationNegativeTestJSON-1491050792 tempest-FloatingIPsAssociationNegativeTestJSON-1491050792-project-member] Lock "cd3d1cbb-be77-4fd0-9666-0d544a19a16d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 17.874s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 539.920425] env[61995]: DEBUG nova.compute.resource_tracker [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61995) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 539.920425] env[61995]: DEBUG oslo_concurrency.lockutils [None req-71c2fabe-40c2-4828-87e6-8ede90ec8b78 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.797s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 539.920425] env[61995]: DEBUG oslo_concurrency.lockutils [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 4.873s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 539.920425] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Expecting reply to msg 3d4906bde8c546ebaf207e4164c75bf3 in queue reply_757213bc08bb49dab178826d88b76f40 [ 539.973040] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3d4906bde8c546ebaf207e4164c75bf3 [ 540.008807] env[61995]: DEBUG nova.network.neutron [req-ff3b88e4-0198-40ce-abab-5c2dddf0a5cb req-192ddb7e-18ae-4994-a96b-f1904880b0c8 service nova] [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] Instance cache missing network info.
{{(pid=61995) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 540.077444] env[61995]: DEBUG nova.network.neutron [req-ff3b88e4-0198-40ce-abab-5c2dddf0a5cb req-192ddb7e-18ae-4994-a96b-f1904880b0c8 service nova] [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] Updating instance_info_cache with network_info: [] {{(pid=61995) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 540.077995] env[61995]: INFO oslo_messaging._drivers.amqpdriver [req-ff3b88e4-0198-40ce-abab-5c2dddf0a5cb req-192ddb7e-18ae-4994-a96b-f1904880b0c8 service nova] Expecting reply to msg 11ea1ae5057b4df0a93b279f9e8f3de6 in queue reply_757213bc08bb49dab178826d88b76f40 [ 540.095273] env[61995]: DEBUG oslo_vmware.api [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': task-378080, 'name': PowerOffVM_Task, 'duration_secs': 0.181045} completed successfully. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 540.096257] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 11ea1ae5057b4df0a93b279f9e8f3de6 [ 540.097068] env[61995]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Powered off the VM {{(pid=61995) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 540.097442] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Destroying instance {{(pid=61995) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 540.098426] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d81c9d6-430e-4980-b600-7da41bf573ab {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.109185] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Unregistering the VM {{(pid=61995) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 540.109819] env[61995]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-eb350206-daba-4b5d-bf7a-814f674aa56f {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.115869] env[61995]: DEBUG oslo_concurrency.lockutils [None req-9ace5a90-e467-428a-82b2-7a1c680dfdf4 tempest-ServersAdminNegativeTestJSON-2047713534 tempest-ServersAdminNegativeTestJSON-2047713534-project-member] Acquiring lock "aa885305-7cdb-44d3-9bd2-880eb042a63e" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 540.116362] env[61995]: DEBUG oslo_concurrency.lockutils [None req-9ace5a90-e467-428a-82b2-7a1c680dfdf4 tempest-ServersAdminNegativeTestJSON-2047713534 tempest-ServersAdminNegativeTestJSON-2047713534-project-member] Lock "aa885305-7cdb-44d3-9bd2-880eb042a63e" acquired
by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 540.117336] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-9ace5a90-e467-428a-82b2-7a1c680dfdf4 tempest-ServersAdminNegativeTestJSON-2047713534 tempest-ServersAdminNegativeTestJSON-2047713534-project-member] Expecting reply to msg 6baf4df92e71403bafb647a3cacd0915 in queue reply_757213bc08bb49dab178826d88b76f40 [ 540.134443] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6baf4df92e71403bafb647a3cacd0915 [ 540.136885] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Unregistered the VM {{(pid=61995) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 540.137337] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Deleting contents of the VM from datastore datastore1 {{(pid=61995) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 540.137830] env[61995]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Deleting the datastore file [datastore1] 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62 {{(pid=61995) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 540.138295] env[61995]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-15efddbf-5891-4e2b-ba8d-f9189b61d79b {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.147077] env[61995]: DEBUG oslo_vmware.api [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Waiting for the task: (returnval){ [ 540.147077] env[61995]: value = "task-378082" [ 540.147077] env[61995]: _type = "Task" [ 540.147077] env[61995]: } to complete. {{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 540.159114] env[61995]: DEBUG oslo_vmware.api [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': task-378082, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 540.221210] env[61995]: INFO nova.compute.manager [-] [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] Took 1.05 seconds to deallocate network for instance.
[ 540.222421] env[61995]: DEBUG nova.compute.claims [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] Aborting claim: {{(pid=61995) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 540.222421] env[61995]: DEBUG oslo_concurrency.lockutils [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 540.569940] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5432bcae-deb7-4d3c-a65c-2e70b05e0cbd {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.584979] env[61995]: DEBUG oslo_concurrency.lockutils [req-ff3b88e4-0198-40ce-abab-5c2dddf0a5cb req-192ddb7e-18ae-4994-a96b-f1904880b0c8 service nova] Releasing lock "refresh_cache-2a07c11a-4e95-47db-bf50-5ad720403faa" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 540.585295] env[61995]: DEBUG nova.compute.manager [req-ff3b88e4-0198-40ce-abab-5c2dddf0a5cb req-192ddb7e-18ae-4994-a96b-f1904880b0c8 service nova] [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] Received event network-vif-deleted-44c93bf1-8e65-4c5c-bbf7-fa9b74625351 {{(pid=61995) external_instance_event /opt/stack/nova/nova/compute/manager.py:11144}} [ 540.587245] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8368733a-690d-4039-b501-90a458651a8c {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.636706] env[61995]: DEBUG nova.compute.manager [None req-9ace5a90-e467-428a-82b2-7a1c680dfdf4 tempest-ServersAdminNegativeTestJSON-2047713534 tempest-ServersAdminNegativeTestJSON-2047713534-project-member] [instance: aa885305-7cdb-44d3-9bd2-880eb042a63e] Starting instance... {{(pid=61995) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 540.638515] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-9ace5a90-e467-428a-82b2-7a1c680dfdf4 tempest-ServersAdminNegativeTestJSON-2047713534 tempest-ServersAdminNegativeTestJSON-2047713534-project-member] Expecting reply to msg aa94cd32cec043e5a7b7b2ef8f6b2ff9 in queue reply_757213bc08bb49dab178826d88b76f40 [ 540.650920] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a1f907a-9c61-4a02-86a5-dd329fb2d7d5 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.664897] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06d08c94-bbd2-4e19-bcd0-ee9e39cd3eff {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.669323] env[61995]: DEBUG oslo_vmware.api [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': task-378082, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.206793} completed successfully. 
{{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 540.669915] env[61995]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Deleted the datastore file {{(pid=61995) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 540.669915] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Deleted contents of the VM from datastore datastore1 {{(pid=61995) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 540.670036] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Instance destroyed {{(pid=61995) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 540.671514] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Expecting reply to msg c63bf068767849b8b5934eb61a20175b in queue reply_757213bc08bb49dab178826d88b76f40 [ 540.690706] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aa94cd32cec043e5a7b7b2ef8f6b2ff9 [ 540.691612] env[61995]: DEBUG nova.compute.provider_tree [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Inventory has not changed in ProviderTree for provider: c68e615f-c0c6-4278-bb7b-a579e68878a7 {{(pid=61995) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 540.692094] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Expecting reply to msg e983263cc3374be59e39137531f83a72 in queue reply_757213bc08bb49dab178826d88b76f40 [ 540.704816] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e983263cc3374be59e39137531f83a72 [ 540.717742] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c63bf068767849b8b5934eb61a20175b [ 541.175028] env[61995]: DEBUG oslo_concurrency.lockutils [None req-9ace5a90-e467-428a-82b2-7a1c680dfdf4 tempest-ServersAdminNegativeTestJSON-2047713534 tempest-ServersAdminNegativeTestJSON-2047713534-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 541.194697] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Expecting reply to msg eb47cd130ca04ba0af1282a9fd2de286 in queue reply_757213bc08bb49dab178826d88b76f40 [ 541.196201] env[61995]: DEBUG nova.scheduler.client.report [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Inventory has not changed for provider 
c68e615f-c0c6-4278-bb7b-a579e68878a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61995) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 541.198404] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Expecting reply to msg 7885ed67dc954485a5f0f9d8865638a9 in queue reply_757213bc08bb49dab178826d88b76f40 [ 541.210970] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7885ed67dc954485a5f0f9d8865638a9 [ 541.231916] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eb47cd130ca04ba0af1282a9fd2de286 [ 541.268223] env[61995]: DEBUG nova.compute.manager [req-e5e95111-02fb-473a-8423-15991567ba82 req-4c29771a-3712-4506-838a-acd7243c51a3 service nova] [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] Received event network-vif-deleted-a37484e9-c5c2-4756-a547-b348cb68b24b {{(pid=61995) external_instance_event /opt/stack/nova/nova/compute/manager.py:11144}} [ 541.268434] env[61995]: DEBUG nova.compute.manager [req-e5e95111-02fb-473a-8423-15991567ba82 req-4c29771a-3712-4506-838a-acd7243c51a3 service nova] [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] Received event network-changed-58958f67-8153-4158-b726-bdfe68b1e491 {{(pid=61995) external_instance_event /opt/stack/nova/nova/compute/manager.py:11144}} [ 541.268590] env[61995]: DEBUG nova.compute.manager [req-e5e95111-02fb-473a-8423-15991567ba82 req-4c29771a-3712-4506-838a-acd7243c51a3 service nova] [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] Refreshing instance network info cache due to event network-changed-58958f67-8153-4158-b726-bdfe68b1e491. 
{{(pid=61995) external_instance_event /opt/stack/nova/nova/compute/manager.py:11149}} [ 541.268791] env[61995]: DEBUG oslo_concurrency.lockutils [req-e5e95111-02fb-473a-8423-15991567ba82 req-4c29771a-3712-4506-838a-acd7243c51a3 service nova] Acquiring lock "refresh_cache-cfffe8e0-4074-41c9-8e1b-49d621fc3c1b" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 541.268923] env[61995]: DEBUG oslo_concurrency.lockutils [req-e5e95111-02fb-473a-8423-15991567ba82 req-4c29771a-3712-4506-838a-acd7243c51a3 service nova] Acquired lock "refresh_cache-cfffe8e0-4074-41c9-8e1b-49d621fc3c1b" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 541.269072] env[61995]: DEBUG nova.network.neutron [req-e5e95111-02fb-473a-8423-15991567ba82 req-4c29771a-3712-4506-838a-acd7243c51a3 service nova] [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] Refreshing network info cache for port 58958f67-8153-4158-b726-bdfe68b1e491 {{(pid=61995) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 541.269506] env[61995]: INFO oslo_messaging._drivers.amqpdriver [req-e5e95111-02fb-473a-8423-15991567ba82 req-4c29771a-3712-4506-838a-acd7243c51a3 service nova] Expecting reply to msg 3386edc9556f48fe89961a734df33ba8 in queue reply_757213bc08bb49dab178826d88b76f40 [ 541.276638] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3386edc9556f48fe89961a734df33ba8 [ 541.703712] env[61995]: DEBUG oslo_concurrency.lockutils [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.786s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 541.704367] env[61995]: ERROR nova.compute.manager [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b996cfab-151e-438f-929d-3393ad4e64f7, please check neutron logs for more information. 
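This is where the deferred network allocation catches up with the build: Neutron left the port's binding in a failed state, so Nova raises PortBindingFailed, aborts the resource claim (the abort_instance_claim entries above), and the traceback that follows walks from the greenthread started by _allocate_network_async down to _ensure_no_port_binding_failure in nova/network/neutron.py. As a rough illustration of that final check, assuming the failure is signalled through the port's binding:vif_type field (an assumption, not something shown in this log):

# Illustrative sketch only; the binding:vif_type convention is an assumption.
VIF_TYPE_BINDING_FAILED = 'binding_failed'

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(f"Binding failed for port {port_id}, "
                         "please check neutron logs for more information.")

def ensure_no_port_binding_failure(port):
    """Raise if Neutron reports that binding this port failed."""
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])

# A port dict shaped like a Neutron "show port" response for the failed port.
port = {'id': 'b996cfab-151e-438f-929d-3393ad4e64f7',
        'binding:vif_type': VIF_TYPE_BINDING_FAILED}
try:
    ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)   # prints the "Binding failed for port ..." message

Because the port update runs on a separate greenthread, the exception only surfaces when the spawn path iterates the network_info object and calls wait(), which is why the traceback below starts in the vmwareapi driver's get_vif_info rather than in the Neutron client code.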
[ 541.704367] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] Traceback (most recent call last): [ 541.704367] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] File "/opt/stack/nova/nova/compute/manager.py", line 2650, in _build_and_run_instance [ 541.704367] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] self.driver.spawn(context, instance, image_meta, [ 541.704367] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 541.704367] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 541.704367] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 541.704367] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] vm_ref = self.build_virtual_machine(instance, [ 541.704367] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 541.704367] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] vif_infos = vmwarevif.get_vif_info(self._session, [ 541.704367] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 541.704801] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] for vif in network_info: [ 541.704801] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 541.704801] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] return self._sync_wrapper(fn, *args, **kwargs) [ 541.704801] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 541.704801] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] self.wait() [ 541.704801] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 541.704801] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] self[:] = self._gt.wait() [ 541.704801] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 541.704801] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] return self._exit_event.wait() [ 541.704801] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 541.704801] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] result = hub.switch() [ 541.704801] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
541.704801] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] return self.greenlet.switch() [ 541.705187] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 541.705187] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] result = function(*args, **kwargs) [ 541.705187] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 541.705187] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] return func(*args, **kwargs) [ 541.705187] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] File "/opt/stack/nova/nova/compute/manager.py", line 2020, in _allocate_network_async [ 541.705187] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] raise e [ 541.705187] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] File "/opt/stack/nova/nova/compute/manager.py", line 1998, in _allocate_network_async [ 541.705187] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] nwinfo = self.network_api.allocate_for_instance( [ 541.705187] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 541.705187] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] created_port_ids = self._update_ports_for_instance( [ 541.705187] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 541.705187] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] with excutils.save_and_reraise_exception(): [ 541.705187] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 541.705535] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] self.force_reraise() [ 541.705535] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 541.705535] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] raise self.value [ 541.705535] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 541.705535] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] updated_port = self._update_port( [ 541.705535] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 541.705535] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] _ensure_no_port_binding_failure(port) [ 541.705535] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 541.705535] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] raise exception.PortBindingFailed(port_id=port['id']) [ 541.705535] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] nova.exception.PortBindingFailed: Binding failed for port b996cfab-151e-438f-929d-3393ad4e64f7, please check neutron logs for more information. [ 541.705535] env[61995]: ERROR nova.compute.manager [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] [ 541.705864] env[61995]: DEBUG nova.compute.utils [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] Binding failed for port b996cfab-151e-438f-929d-3393ad4e64f7, please check neutron logs for more information. {{(pid=61995) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 541.708280] env[61995]: DEBUG oslo_concurrency.lockutils [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.208s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 541.708280] env[61995]: INFO nova.compute.claims [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] [instance: b819920a-cc74-4718-a054-e81affabcd5b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 541.711249] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] Expecting reply to msg 130a412c426a41a88354825211c19dc7 in queue reply_757213bc08bb49dab178826d88b76f40 [ 541.712074] env[61995]: DEBUG nova.compute.manager [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] Build of instance 4c0dd905-f751-4cb4-9be1-ef06518990f8 was re-scheduled: Binding failed for port b996cfab-151e-438f-929d-3393ad4e64f7, please check neutron logs for more information. 
{{(pid=61995) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2489}} [ 541.713756] env[61995]: DEBUG nova.compute.manager [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] Unplugging VIFs for instance {{(pid=61995) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3015}} [ 541.717206] env[61995]: DEBUG oslo_concurrency.lockutils [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Acquiring lock "refresh_cache-4c0dd905-f751-4cb4-9be1-ef06518990f8" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 541.717206] env[61995]: DEBUG oslo_concurrency.lockutils [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Acquired lock "refresh_cache-4c0dd905-f751-4cb4-9be1-ef06518990f8" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 541.717206] env[61995]: DEBUG nova.network.neutron [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] Building network info cache for instance {{(pid=61995) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 541.717206] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Expecting reply to msg 876e118f98254fb99cb6883e3116b15d in queue reply_757213bc08bb49dab178826d88b76f40 [ 541.725880] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 876e118f98254fb99cb6883e3116b15d [ 541.740333] env[61995]: DEBUG nova.virt.hardware [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-06T07:57:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-06T07:57:11Z,direct_url=,disk_format='vmdk',id=947125f0-9664-40eb-953e-b1373b076c9f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3dc1e6434ef44fd0be01fd4ebf2b62ef',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-06T07:57:12Z,virtual_size=,visibility=), allow threads: False {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 541.740636] env[61995]: DEBUG nova.virt.hardware [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Flavor limits 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 541.740844] env[61995]: DEBUG nova.virt.hardware [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 
tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Image limits 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 541.741194] env[61995]: DEBUG nova.virt.hardware [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Flavor pref 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 541.741385] env[61995]: DEBUG nova.virt.hardware [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Image pref 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 541.741569] env[61995]: DEBUG nova.virt.hardware [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 541.741818] env[61995]: DEBUG nova.virt.hardware [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 541.742020] env[61995]: DEBUG nova.virt.hardware [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61995) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 541.742254] env[61995]: DEBUG nova.virt.hardware [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Got 1 possible topologies {{(pid=61995) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 541.742475] env[61995]: DEBUG nova.virt.hardware [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 541.742703] env[61995]: DEBUG nova.virt.hardware [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 541.744135] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf44757c-c25d-4d4c-b757-97e20c3d1a19 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.752742] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec5106d1-f756-42b0-b951-23aca238a89b {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.758438] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
130a412c426a41a88354825211c19dc7 [ 541.769096] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Instance VIF info [] {{(pid=61995) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 541.776274] env[61995]: DEBUG oslo.service.loopingcall [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61995) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 541.779413] env[61995]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Creating VM on the ESX host {{(pid=61995) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 541.779926] env[61995]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-45729a3a-12ad-450e-aaa1-8bf903babd20 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.801030] env[61995]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 541.801030] env[61995]: value = "task-378083" [ 541.801030] env[61995]: _type = "Task" [ 541.801030] env[61995]: } to complete. {{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 541.812311] env[61995]: DEBUG oslo_vmware.api [-] Task: {'id': task-378083, 'name': CreateVM_Task} progress is 0%. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 541.842326] env[61995]: DEBUG nova.network.neutron [req-e5e95111-02fb-473a-8423-15991567ba82 req-4c29771a-3712-4506-838a-acd7243c51a3 service nova] [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] Instance cache missing network info. 
{{(pid=61995) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 542.055939] env[61995]: DEBUG nova.network.neutron [req-e5e95111-02fb-473a-8423-15991567ba82 req-4c29771a-3712-4506-838a-acd7243c51a3 service nova] [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] Updating instance_info_cache with network_info: [] {{(pid=61995) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 542.056572] env[61995]: INFO oslo_messaging._drivers.amqpdriver [req-e5e95111-02fb-473a-8423-15991567ba82 req-4c29771a-3712-4506-838a-acd7243c51a3 service nova] Expecting reply to msg 5b4fe8d9627843b7bfe8571ed1257b2e in queue reply_757213bc08bb49dab178826d88b76f40 [ 542.066724] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5b4fe8d9627843b7bfe8571ed1257b2e [ 542.219623] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] Expecting reply to msg 0736e0ab238145f39caca35ad886ea67 in queue reply_757213bc08bb49dab178826d88b76f40 [ 542.228487] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0736e0ab238145f39caca35ad886ea67 [ 542.251020] env[61995]: DEBUG nova.network.neutron [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] Instance cache missing network info. {{(pid=61995) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 542.314964] env[61995]: DEBUG oslo_vmware.api [-] Task: {'id': task-378083, 'name': CreateVM_Task, 'duration_secs': 0.334631} completed successfully. 
{{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 542.315343] env[61995]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Created VM on the ESX host {{(pid=61995) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 542.316998] env[61995]: DEBUG oslo_vmware.service [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd8a49b3-b7f3-4ceb-a48a-c453ca81ad21 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.331682] env[61995]: DEBUG oslo_concurrency.lockutils [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/947125f0-9664-40eb-953e-b1373b076c9f" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 542.331682] env[61995]: DEBUG oslo_concurrency.lockutils [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Acquired lock "[datastore2] devstack-image-cache_base/947125f0-9664-40eb-953e-b1373b076c9f" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 542.332805] env[61995]: DEBUG oslo_concurrency.lockutils [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/947125f0-9664-40eb-953e-b1373b076c9f" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 542.333165] env[61995]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d7795fc-5053-41c8-b202-3012f351bda4 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.341453] env[61995]: DEBUG oslo_vmware.api [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Waiting for the task: (returnval){ [ 542.341453] env[61995]: value = "session[5258be39-d7e5-2e06-99ec-0419db7e1a3e]52f27054-45fb-0ed5-51f6-727a69f227c8" [ 542.341453] env[61995]: _type = "Task" [ 542.341453] env[61995]: } to complete. 
{{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 542.359694] env[61995]: DEBUG oslo_concurrency.lockutils [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Releasing lock "[datastore2] devstack-image-cache_base/947125f0-9664-40eb-953e-b1373b076c9f" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 542.360168] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Processing image 947125f0-9664-40eb-953e-b1373b076c9f {{(pid=61995) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 542.360543] env[61995]: DEBUG oslo_concurrency.lockutils [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/947125f0-9664-40eb-953e-b1373b076c9f/947125f0-9664-40eb-953e-b1373b076c9f.vmdk" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 542.360789] env[61995]: DEBUG oslo_concurrency.lockutils [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Acquired lock "[datastore2] devstack-image-cache_base/947125f0-9664-40eb-953e-b1373b076c9f/947125f0-9664-40eb-953e-b1373b076c9f.vmdk" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 542.361081] env[61995]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61995) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 542.361894] env[61995]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-96b7a30d-5b84-4fa2-95de-d9128d51f0ef {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.372614] env[61995]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61995) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 542.372614] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61995) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 542.373247] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec11b46b-2b0e-4fba-8490-82ffcc379456 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.380138] env[61995]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-614fd4e5-6c20-4ec6-b3af-d30f8eae6af1 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.385636] env[61995]: DEBUG oslo_vmware.api [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Waiting for the task: (returnval){ [ 542.385636] env[61995]: value = "session[5258be39-d7e5-2e06-99ec-0419db7e1a3e]52cac88b-2575-87b4-fffe-1ee9e74edca1" [ 542.385636] env[61995]: _type = "Task" [ 542.385636] env[61995]: } to complete. {{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 542.386983] env[61995]: DEBUG nova.network.neutron [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] Updating instance_info_cache with network_info: [] {{(pid=61995) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 542.387662] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Expecting reply to msg 7c9925017aeb485b80350a54947bc64c in queue reply_757213bc08bb49dab178826d88b76f40 [ 542.397728] env[61995]: DEBUG oslo_vmware.api [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': session[5258be39-d7e5-2e06-99ec-0419db7e1a3e]52cac88b-2575-87b4-fffe-1ee9e74edca1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 542.398522] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c9925017aeb485b80350a54947bc64c [ 542.559166] env[61995]: DEBUG oslo_concurrency.lockutils [req-e5e95111-02fb-473a-8423-15991567ba82 req-4c29771a-3712-4506-838a-acd7243c51a3 service nova] Releasing lock "refresh_cache-cfffe8e0-4074-41c9-8e1b-49d621fc3c1b" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 542.559317] env[61995]: DEBUG nova.compute.manager [req-e5e95111-02fb-473a-8423-15991567ba82 req-4c29771a-3712-4506-838a-acd7243c51a3 service nova] [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] Received event network-vif-deleted-58958f67-8153-4158-b726-bdfe68b1e491 {{(pid=61995) external_instance_event /opt/stack/nova/nova/compute/manager.py:11144}} [ 542.897182] env[61995]: DEBUG oslo_concurrency.lockutils [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Releasing lock "refresh_cache-4c0dd905-f751-4cb4-9be1-ef06518990f8" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 542.897422] env[61995]: DEBUG nova.compute.manager [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61995) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3038}} [ 542.900039] env[61995]: DEBUG nova.compute.manager [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] Deallocating network for instance {{(pid=61995) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2298}} [ 542.900039] env[61995]: DEBUG nova.network.neutron [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] deallocate_for_instance() {{(pid=61995) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 542.902214] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Preparing fetch location {{(pid=61995) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 542.902495] env[61995]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Creating directory with path [datastore2] vmware_temp/c7145544-7e21-4b9f-b0d7-9e51e0569714/947125f0-9664-40eb-953e-b1373b076c9f {{(pid=61995) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 542.903145] env[61995]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-867d27cd-2c89-492b-9da5-0af4c69e2226 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.919946] env[61995]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abc1202f-981e-46cf-94b9-b45916c0ef98 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.924369] env[61995]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Created directory with path [datastore2] vmware_temp/c7145544-7e21-4b9f-b0d7-9e51e0569714/947125f0-9664-40eb-953e-b1373b076c9f {{(pid=61995) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 542.924604] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Fetch image to [datastore2] vmware_temp/c7145544-7e21-4b9f-b0d7-9e51e0569714/947125f0-9664-40eb-953e-b1373b076c9f/tmp-sparse.vmdk {{(pid=61995) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 542.924768] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Downloading image file data 947125f0-9664-40eb-953e-b1373b076c9f to [datastore2] vmware_temp/c7145544-7e21-4b9f-b0d7-9e51e0569714/947125f0-9664-40eb-953e-b1373b076c9f/tmp-sparse.vmdk on the data store datastore2 {{(pid=61995) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 542.925970] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-498bb595-61c0-4b77-9486-9770ee6de423 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.931918] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72ffc2b6-886b-4bc5-8d7d-6a22144c6dee {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.938923] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dd164d4-45f3-490f-83df-5f23d893145c {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.973675] env[61995]: DEBUG nova.network.neutron [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] Instance cache missing network info. 
{{(pid=61995) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 542.974566] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Expecting reply to msg 6a8e7bfe6bed483bbe0c67191ca24e35 in queue reply_757213bc08bb49dab178826d88b76f40 [ 542.976814] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bcaffa7-5cd5-4065-9763-24910231ca86 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.985449] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6a8e7bfe6bed483bbe0c67191ca24e35 [ 542.987376] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e0da139-68be-443f-bcc5-f4c5563218e4 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.996686] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed10de8a-e33f-4982-b219-92995252e3b5 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.029623] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-111bd26b-7f0e-4e58-a6e1-855f60d1c81d {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.041752] env[61995]: DEBUG nova.compute.provider_tree [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] Inventory has not changed in ProviderTree for provider: c68e615f-c0c6-4278-bb7b-a579e68878a7 {{(pid=61995) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 543.042185] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] Expecting reply to msg 275776a5165742eead06364644046eaa in queue reply_757213bc08bb49dab178826d88b76f40 [ 543.047237] env[61995]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-bcb01ae3-8ab5-48fa-a8d1-9ec3b1c07d89 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.051251] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 275776a5165742eead06364644046eaa [ 543.094127] env[61995]: DEBUG nova.virt.vmwareapi.images [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Downloading image file data 947125f0-9664-40eb-953e-b1373b076c9f to the data store datastore2 {{(pid=61995) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 543.160081] env[61995]: DEBUG oslo_vmware.rw_handles [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = 
https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c7145544-7e21-4b9f-b0d7-9e51e0569714/947125f0-9664-40eb-953e-b1373b076c9f/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61995) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 543.486170] env[61995]: DEBUG nova.network.neutron [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] Updating instance_info_cache with network_info: [] {{(pid=61995) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 543.487206] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Expecting reply to msg aa3f2c7be3604ced8a8e5898eb89f1af in queue reply_757213bc08bb49dab178826d88b76f40 [ 543.501798] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aa3f2c7be3604ced8a8e5898eb89f1af [ 543.544918] env[61995]: DEBUG nova.scheduler.client.report [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] Inventory has not changed for provider c68e615f-c0c6-4278-bb7b-a579e68878a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61995) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 543.548605] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] Expecting reply to msg be9b9eac8c4b46328f035eb0d679ef5a in queue reply_757213bc08bb49dab178826d88b76f40 [ 543.584048] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg be9b9eac8c4b46328f035eb0d679ef5a [ 543.994448] env[61995]: INFO nova.compute.manager [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] [instance: 4c0dd905-f751-4cb4-9be1-ef06518990f8] Took 1.09 seconds to deallocate network for instance. [ 543.994448] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Expecting reply to msg 2c69b685b1e845a680d8b29f693298f1 in queue reply_757213bc08bb49dab178826d88b76f40 [ 544.012682] env[61995]: DEBUG oslo_vmware.rw_handles [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Completed reading data from the image iterator. 
{{(pid=61995) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 544.012993] env[61995]: DEBUG oslo_vmware.rw_handles [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c7145544-7e21-4b9f-b0d7-9e51e0569714/947125f0-9664-40eb-953e-b1373b076c9f/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61995) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 544.054139] env[61995]: DEBUG oslo_concurrency.lockutils [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.348s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 544.054666] env[61995]: DEBUG nova.compute.manager [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] [instance: b819920a-cc74-4718-a054-e81affabcd5b] Start building networks asynchronously for instance. {{(pid=61995) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 544.056536] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] Expecting reply to msg 27887d797ade481ba614a9e06b054607 in queue reply_757213bc08bb49dab178826d88b76f40 [ 544.057651] env[61995]: DEBUG oslo_concurrency.lockutils [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 6.915s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 544.059671] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Expecting reply to msg 45dde612cb36491ebd4b462609ccb91e in queue reply_757213bc08bb49dab178826d88b76f40 [ 544.087828] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2c69b685b1e845a680d8b29f693298f1 [ 544.099032] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 27887d797ade481ba614a9e06b054607 [ 544.113190] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 45dde612cb36491ebd4b462609ccb91e [ 544.127683] env[61995]: DEBUG nova.virt.vmwareapi.images [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Downloaded image file data 947125f0-9664-40eb-953e-b1373b076c9f to vmware_temp/c7145544-7e21-4b9f-b0d7-9e51e0569714/947125f0-9664-40eb-953e-b1373b076c9f/tmp-sparse.vmdk on the data store datastore2 {{(pid=61995) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 544.129481] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 
tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Caching image {{(pid=61995) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 544.129829] env[61995]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Copying Virtual Disk [datastore2] vmware_temp/c7145544-7e21-4b9f-b0d7-9e51e0569714/947125f0-9664-40eb-953e-b1373b076c9f/tmp-sparse.vmdk to [datastore2] vmware_temp/c7145544-7e21-4b9f-b0d7-9e51e0569714/947125f0-9664-40eb-953e-b1373b076c9f/947125f0-9664-40eb-953e-b1373b076c9f.vmdk {{(pid=61995) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 544.130111] env[61995]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ebf5ad26-91b4-4c10-97fe-d9c0f08ea7b0 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.138719] env[61995]: DEBUG oslo_vmware.api [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Waiting for the task: (returnval){ [ 544.138719] env[61995]: value = "task-378084" [ 544.138719] env[61995]: _type = "Task" [ 544.138719] env[61995]: } to complete. {{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 544.149338] env[61995]: DEBUG oslo_vmware.api [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': task-378084, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 544.498876] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Expecting reply to msg 58d43eeed6834b4a98e4cd1fe1b64452 in queue reply_757213bc08bb49dab178826d88b76f40 [ 544.567048] env[61995]: DEBUG nova.compute.utils [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] Using /dev/sd instead of None {{(pid=61995) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 544.567048] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] Expecting reply to msg 1432f9e375914c269de89e6f166adb78 in queue reply_757213bc08bb49dab178826d88b76f40 [ 544.574305] env[61995]: DEBUG nova.compute.manager [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] [instance: b819920a-cc74-4718-a054-e81affabcd5b] Allocating IP information in the background. 
{{(pid=61995) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1990}} [ 544.574993] env[61995]: DEBUG nova.network.neutron [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] [instance: b819920a-cc74-4718-a054-e81affabcd5b] allocate_for_instance() {{(pid=61995) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 544.580748] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 58d43eeed6834b4a98e4cd1fe1b64452 [ 544.581301] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1432f9e375914c269de89e6f166adb78 [ 544.648558] env[61995]: DEBUG nova.policy [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '39f5f66c1d4f42f68d85a3c0c9247275', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fed848926cc548678668e4cdd680ef04', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61995) authorize /opt/stack/nova/nova/policy.py:203}} [ 544.656659] env[61995]: DEBUG oslo_vmware.api [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': task-378084, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 544.673199] env[61995]: DEBUG oslo_concurrency.lockutils [None req-c2ead0ea-6b64-4408-9280-a0773cf9692e tempest-ServersWithSpecificFlavorTestJSON-889640241 tempest-ServersWithSpecificFlavorTestJSON-889640241-project-member] Acquiring lock "afe9a3ca-2235-4a2b-9b9f-7988a77f3c2d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 544.673199] env[61995]: DEBUG oslo_concurrency.lockutils [None req-c2ead0ea-6b64-4408-9280-a0773cf9692e tempest-ServersWithSpecificFlavorTestJSON-889640241 tempest-ServersWithSpecificFlavorTestJSON-889640241-project-member] Lock "afe9a3ca-2235-4a2b-9b9f-7988a77f3c2d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 544.674679] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-c2ead0ea-6b64-4408-9280-a0773cf9692e tempest-ServersWithSpecificFlavorTestJSON-889640241 tempest-ServersWithSpecificFlavorTestJSON-889640241-project-member] Expecting reply to msg 015df4701aa44471a443e2de68f8a1c6 in queue reply_757213bc08bb49dab178826d88b76f40 [ 544.691046] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 015df4701aa44471a443e2de68f8a1c6 [ 544.792558] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebff5999-2d1c-4bce-9715-5ea7ae14d28e {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.803869] 
env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bda7217f-a858-4aed-91d7-d82f8be4b0c6 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.849634] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b6bdcbd-dee0-4066-a946-c333953c6dd0 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.860804] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30b4fd06-b328-403c-8981-2fd237ed46a8 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.874758] env[61995]: DEBUG nova.compute.provider_tree [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Inventory has not changed in ProviderTree for provider: c68e615f-c0c6-4278-bb7b-a579e68878a7 {{(pid=61995) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 544.879827] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Expecting reply to msg 6f919455f13b4a84ad09a458a3d1f607 in queue reply_757213bc08bb49dab178826d88b76f40 [ 544.888285] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6f919455f13b4a84ad09a458a3d1f607 [ 545.031567] env[61995]: INFO nova.scheduler.client.report [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Deleted allocations for instance 4c0dd905-f751-4cb4-9be1-ef06518990f8 [ 545.040191] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Expecting reply to msg 8c1473aca92b403da2f9c9048595636c in queue reply_757213bc08bb49dab178826d88b76f40 [ 545.059069] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8c1473aca92b403da2f9c9048595636c [ 545.074219] env[61995]: DEBUG nova.compute.manager [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] [instance: b819920a-cc74-4718-a054-e81affabcd5b] Start building block device mappings for instance. {{(pid=61995) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 545.076180] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] Expecting reply to msg 3f581f5986074dc39f64216b5361cbd7 in queue reply_757213bc08bb49dab178826d88b76f40 [ 545.150474] env[61995]: DEBUG oslo_vmware.api [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': task-378084, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.716497} completed successfully. 
{{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 545.150990] env[61995]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Copied Virtual Disk [datastore2] vmware_temp/c7145544-7e21-4b9f-b0d7-9e51e0569714/947125f0-9664-40eb-953e-b1373b076c9f/tmp-sparse.vmdk to [datastore2] vmware_temp/c7145544-7e21-4b9f-b0d7-9e51e0569714/947125f0-9664-40eb-953e-b1373b076c9f/947125f0-9664-40eb-953e-b1373b076c9f.vmdk {{(pid=61995) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 545.151423] env[61995]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Deleting the datastore file [datastore2] vmware_temp/c7145544-7e21-4b9f-b0d7-9e51e0569714/947125f0-9664-40eb-953e-b1373b076c9f/tmp-sparse.vmdk {{(pid=61995) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 545.152277] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3f581f5986074dc39f64216b5361cbd7 [ 545.152739] env[61995]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a804386f-8ce3-42c3-a0bf-eeefcc28e736 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.160373] env[61995]: DEBUG oslo_vmware.api [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Waiting for the task: (returnval){ [ 545.160373] env[61995]: value = "task-378085" [ 545.160373] env[61995]: _type = "Task" [ 545.160373] env[61995]: } to complete. {{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 545.170342] env[61995]: DEBUG oslo_vmware.api [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': task-378085, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 545.178630] env[61995]: DEBUG nova.compute.manager [None req-c2ead0ea-6b64-4408-9280-a0773cf9692e tempest-ServersWithSpecificFlavorTestJSON-889640241 tempest-ServersWithSpecificFlavorTestJSON-889640241-project-member] [instance: afe9a3ca-2235-4a2b-9b9f-7988a77f3c2d] Starting instance... 
{{(pid=61995) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 545.184026] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-c2ead0ea-6b64-4408-9280-a0773cf9692e tempest-ServersWithSpecificFlavorTestJSON-889640241 tempest-ServersWithSpecificFlavorTestJSON-889640241-project-member] Expecting reply to msg 479ed5a5a3c2445e8a5c3e8953e5c734 in queue reply_757213bc08bb49dab178826d88b76f40 [ 545.240533] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 479ed5a5a3c2445e8a5c3e8953e5c734 [ 545.335416] env[61995]: DEBUG oslo_concurrency.lockutils [None req-8622c767-54c8-41f6-a2f5-ea1693fb87d7 tempest-ServersAdminTestJSON-252110596 tempest-ServersAdminTestJSON-252110596-project-member] Acquiring lock "7cf953f7-4afd-4a20-86b6-96a662b6139f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.335645] env[61995]: DEBUG oslo_concurrency.lockutils [None req-8622c767-54c8-41f6-a2f5-ea1693fb87d7 tempest-ServersAdminTestJSON-252110596 tempest-ServersAdminTestJSON-252110596-project-member] Lock "7cf953f7-4afd-4a20-86b6-96a662b6139f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 545.336112] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-8622c767-54c8-41f6-a2f5-ea1693fb87d7 tempest-ServersAdminTestJSON-252110596 tempest-ServersAdminTestJSON-252110596-project-member] Expecting reply to msg d9177ea2ff3145818903f8ce4525e7b2 in queue reply_757213bc08bb49dab178826d88b76f40 [ 545.346861] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d9177ea2ff3145818903f8ce4525e7b2 [ 545.382960] env[61995]: DEBUG nova.scheduler.client.report [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Inventory has not changed for provider c68e615f-c0c6-4278-bb7b-a579e68878a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61995) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 545.385357] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Expecting reply to msg 477a42c29b3247428d069ff3c8e60212 in queue reply_757213bc08bb49dab178826d88b76f40 [ 545.402914] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 477a42c29b3247428d069ff3c8e60212 [ 545.542432] env[61995]: DEBUG oslo_concurrency.lockutils [None req-6edc7ef5-8dea-41f2-9cd6-a09a4eb02c4e tempest-ServerExternalEventsTest-367181382 tempest-ServerExternalEventsTest-367181382-project-member] Lock "4c0dd905-f751-4cb4-9be1-ef06518990f8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.234s {{(pid=61995) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 545.581599] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] Expecting reply to msg 7cfbf1ceaab943608113e301b53d4e31 in queue reply_757213bc08bb49dab178826d88b76f40 [ 545.626841] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7cfbf1ceaab943608113e301b53d4e31 [ 545.671022] env[61995]: DEBUG oslo_vmware.api [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': task-378085, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.025992} completed successfully. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 545.673757] env[61995]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Deleted the datastore file {{(pid=61995) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 545.674146] env[61995]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Moving file from [datastore2] vmware_temp/c7145544-7e21-4b9f-b0d7-9e51e0569714/947125f0-9664-40eb-953e-b1373b076c9f to [datastore2] devstack-image-cache_base/947125f0-9664-40eb-953e-b1373b076c9f. {{(pid=61995) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 545.674527] env[61995]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-60bf8a59-eec0-4b99-af25-611fdcfb238a {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.682057] env[61995]: DEBUG oslo_vmware.api [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Waiting for the task: (returnval){ [ 545.682057] env[61995]: value = "task-378086" [ 545.682057] env[61995]: _type = "Task" [ 545.682057] env[61995]: } to complete. {{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 545.696751] env[61995]: DEBUG oslo_vmware.api [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': task-378086, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 545.704484] env[61995]: DEBUG oslo_concurrency.lockutils [None req-c2ead0ea-6b64-4408-9280-a0773cf9692e tempest-ServersWithSpecificFlavorTestJSON-889640241 tempest-ServersWithSpecificFlavorTestJSON-889640241-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.839550] env[61995]: DEBUG nova.compute.manager [None req-8622c767-54c8-41f6-a2f5-ea1693fb87d7 tempest-ServersAdminTestJSON-252110596 tempest-ServersAdminTestJSON-252110596-project-member] [instance: 7cf953f7-4afd-4a20-86b6-96a662b6139f] Starting instance... 
{{(pid=61995) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 545.841447] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-8622c767-54c8-41f6-a2f5-ea1693fb87d7 tempest-ServersAdminTestJSON-252110596 tempest-ServersAdminTestJSON-252110596-project-member] Expecting reply to msg e1aca4becadc467f83cf45aedc7f5625 in queue reply_757213bc08bb49dab178826d88b76f40 [ 545.847040] env[61995]: DEBUG oslo_concurrency.lockutils [None req-f05e4bca-5463-449c-bcd7-bd61934970a5 tempest-AttachInterfacesTestJSON-1933428740 tempest-AttachInterfacesTestJSON-1933428740-project-member] Acquiring lock "4efc40b0-7ac3-4455-be40-4480240f1ae6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.847293] env[61995]: DEBUG oslo_concurrency.lockutils [None req-f05e4bca-5463-449c-bcd7-bd61934970a5 tempest-AttachInterfacesTestJSON-1933428740 tempest-AttachInterfacesTestJSON-1933428740-project-member] Lock "4efc40b0-7ac3-4455-be40-4480240f1ae6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 545.847746] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-f05e4bca-5463-449c-bcd7-bd61934970a5 tempest-AttachInterfacesTestJSON-1933428740 tempest-AttachInterfacesTestJSON-1933428740-project-member] Expecting reply to msg 9f3cd1285baf42faba27c19b373a6ecd in queue reply_757213bc08bb49dab178826d88b76f40 [ 545.864545] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f3cd1285baf42faba27c19b373a6ecd [ 545.886748] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e1aca4becadc467f83cf45aedc7f5625 [ 545.891819] env[61995]: DEBUG oslo_concurrency.lockutils [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.834s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 545.892384] env[61995]: ERROR nova.compute.manager [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port a37484e9-c5c2-4756-a547-b348cb68b24b, please check neutron logs for more information. 
[ 545.892384] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] Traceback (most recent call last): [ 545.892384] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] File "/opt/stack/nova/nova/compute/manager.py", line 2650, in _build_and_run_instance [ 545.892384] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] self.driver.spawn(context, instance, image_meta, [ 545.892384] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 545.892384] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] self._vmops.spawn(context, instance, image_meta, injected_files, [ 545.892384] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 545.892384] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] vm_ref = self.build_virtual_machine(instance, [ 545.892384] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 545.892384] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] vif_infos = vmwarevif.get_vif_info(self._session, [ 545.892384] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 545.892728] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] for vif in network_info: [ 545.892728] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 545.892728] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] return self._sync_wrapper(fn, *args, **kwargs) [ 545.892728] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 545.892728] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] self.wait() [ 545.892728] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 545.892728] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] self[:] = self._gt.wait() [ 545.892728] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 545.892728] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] return self._exit_event.wait() [ 545.892728] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 545.892728] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] result = hub.switch() [ 545.892728] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
545.892728] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] return self.greenlet.switch() [ 545.893074] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 545.893074] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] result = function(*args, **kwargs) [ 545.893074] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 545.893074] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] return func(*args, **kwargs) [ 545.893074] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] File "/opt/stack/nova/nova/compute/manager.py", line 2020, in _allocate_network_async [ 545.893074] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] raise e [ 545.893074] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] File "/opt/stack/nova/nova/compute/manager.py", line 1998, in _allocate_network_async [ 545.893074] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] nwinfo = self.network_api.allocate_for_instance( [ 545.893074] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 545.893074] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] created_port_ids = self._update_ports_for_instance( [ 545.893074] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 545.893074] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] with excutils.save_and_reraise_exception(): [ 545.893074] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 545.893417] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] self.force_reraise() [ 545.893417] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 545.893417] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] raise self.value [ 545.893417] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 545.893417] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] updated_port = self._update_port( [ 545.893417] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 545.893417] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] _ensure_no_port_binding_failure(port) [ 545.893417] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 545.893417] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] raise exception.PortBindingFailed(port_id=port['id']) [ 545.893417] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] nova.exception.PortBindingFailed: Binding failed for port a37484e9-c5c2-4756-a547-b348cb68b24b, please check neutron logs for more information. [ 545.893417] env[61995]: ERROR nova.compute.manager [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] [ 545.893714] env[61995]: DEBUG nova.compute.utils [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] Binding failed for port a37484e9-c5c2-4756-a547-b348cb68b24b, please check neutron logs for more information. {{(pid=61995) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 545.894252] env[61995]: DEBUG oslo_concurrency.lockutils [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.923s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 545.897885] env[61995]: INFO nova.compute.claims [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 545.898067] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] Expecting reply to msg 3cd563bae1554cfb966d3efa7cab43c3 in queue reply_757213bc08bb49dab178826d88b76f40 [ 545.900244] env[61995]: DEBUG nova.compute.manager [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] Build of instance b610e51c-f7a4-4e3a-85c1-28603fc82bab was re-scheduled: Binding failed for port a37484e9-c5c2-4756-a547-b348cb68b24b, please check neutron logs for more information. 
{{(pid=61995) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2489}} [ 545.900244] env[61995]: DEBUG nova.compute.manager [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] Unplugging VIFs for instance {{(pid=61995) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3015}} [ 545.900244] env[61995]: DEBUG oslo_concurrency.lockutils [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Acquiring lock "refresh_cache-b610e51c-f7a4-4e3a-85c1-28603fc82bab" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 545.900244] env[61995]: DEBUG oslo_concurrency.lockutils [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Acquired lock "refresh_cache-b610e51c-f7a4-4e3a-85c1-28603fc82bab" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 545.900469] env[61995]: DEBUG nova.network.neutron [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] Building network info cache for instance {{(pid=61995) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 545.901774] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Expecting reply to msg 8dbf34c7b56c44308eded299074a3288 in queue reply_757213bc08bb49dab178826d88b76f40 [ 545.911388] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8dbf34c7b56c44308eded299074a3288 [ 545.937126] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3cd563bae1554cfb966d3efa7cab43c3 [ 546.010192] env[61995]: DEBUG nova.network.neutron [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] [instance: b819920a-cc74-4718-a054-e81affabcd5b] Successfully created port: 3a82a148-8617-45cb-95d6-514d987604e1 {{(pid=61995) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 546.088277] env[61995]: DEBUG nova.compute.manager [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] [instance: b819920a-cc74-4718-a054-e81affabcd5b] Start spawning the instance on the hypervisor. 
{{(pid=61995) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 546.116372] env[61995]: DEBUG nova.virt.hardware [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-06T07:57:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-06T07:57:11Z,direct_url=,disk_format='vmdk',id=947125f0-9664-40eb-953e-b1373b076c9f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3dc1e6434ef44fd0be01fd4ebf2b62ef',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-06T07:57:12Z,virtual_size=,visibility=), allow threads: False {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 546.116372] env[61995]: DEBUG nova.virt.hardware [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] Flavor limits 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 546.116372] env[61995]: DEBUG nova.virt.hardware [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] Image limits 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 546.116524] env[61995]: DEBUG nova.virt.hardware [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] Flavor pref 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 546.116524] env[61995]: DEBUG nova.virt.hardware [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] Image pref 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 546.116524] env[61995]: DEBUG nova.virt.hardware [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 546.116524] env[61995]: DEBUG nova.virt.hardware [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 546.116524] env[61995]: DEBUG nova.virt.hardware [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61995) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 546.116671] env[61995]: DEBUG nova.virt.hardware [None 
req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] Got 1 possible topologies {{(pid=61995) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 546.116671] env[61995]: DEBUG nova.virt.hardware [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 546.116671] env[61995]: DEBUG nova.virt.hardware [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 546.116671] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03e4df18-58b6-406c-978d-a2c6d281e1cd {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.124133] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b3c0e9a-5258-4bc3-8e2c-eb954c106e60 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.192798] env[61995]: DEBUG oslo_vmware.api [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': task-378086, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.031276} completed successfully. 
{{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 546.193062] env[61995]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] File moved {{(pid=61995) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 546.193256] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Cleaning up location [datastore2] vmware_temp/c7145544-7e21-4b9f-b0d7-9e51e0569714 {{(pid=61995) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 546.193408] env[61995]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Deleting the datastore file [datastore2] vmware_temp/c7145544-7e21-4b9f-b0d7-9e51e0569714 {{(pid=61995) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 546.193647] env[61995]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f09691de-138e-4531-a750-a86d4db1b78c {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.201089] env[61995]: DEBUG oslo_vmware.api [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Waiting for the task: (returnval){ [ 546.201089] env[61995]: value = "task-378087" [ 546.201089] env[61995]: _type = "Task" [ 546.201089] env[61995]: } to complete. {{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 546.209313] env[61995]: DEBUG oslo_vmware.api [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': task-378087, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 546.352859] env[61995]: DEBUG nova.compute.manager [None req-f05e4bca-5463-449c-bcd7-bd61934970a5 tempest-AttachInterfacesTestJSON-1933428740 tempest-AttachInterfacesTestJSON-1933428740-project-member] [instance: 4efc40b0-7ac3-4455-be40-4480240f1ae6] Starting instance... 
{{(pid=61995) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 546.355770] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-f05e4bca-5463-449c-bcd7-bd61934970a5 tempest-AttachInterfacesTestJSON-1933428740 tempest-AttachInterfacesTestJSON-1933428740-project-member] Expecting reply to msg d2328b016cfc474895ae7fc692fa0b9e in queue reply_757213bc08bb49dab178826d88b76f40 [ 546.372338] env[61995]: DEBUG oslo_concurrency.lockutils [None req-8622c767-54c8-41f6-a2f5-ea1693fb87d7 tempest-ServersAdminTestJSON-252110596 tempest-ServersAdminTestJSON-252110596-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 546.403870] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] Expecting reply to msg fcfeba8623214bfeb59b1b1537e5c0e2 in queue reply_757213bc08bb49dab178826d88b76f40 [ 546.405226] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d2328b016cfc474895ae7fc692fa0b9e [ 546.411279] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fcfeba8623214bfeb59b1b1537e5c0e2 [ 546.444334] env[61995]: DEBUG nova.network.neutron [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] Instance cache missing network info. {{(pid=61995) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 546.560220] env[61995]: DEBUG nova.network.neutron [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] Updating instance_info_cache with network_info: [] {{(pid=61995) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 546.560580] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Expecting reply to msg 6b1cf7c3e6ce419a9a4ea6b0aec9b4fb in queue reply_757213bc08bb49dab178826d88b76f40 [ 546.572361] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6b1cf7c3e6ce419a9a4ea6b0aec9b4fb [ 546.711206] env[61995]: DEBUG oslo_vmware.api [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': task-378087, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.02577} completed successfully. 
{{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 546.711399] env[61995]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Deleted the datastore file {{(pid=61995) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 546.712446] env[61995]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b7afb6f-dbb9-4543-92e1-e042d856f424 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.717695] env[61995]: DEBUG oslo_vmware.api [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Waiting for the task: (returnval){ [ 546.717695] env[61995]: value = "session[5258be39-d7e5-2e06-99ec-0419db7e1a3e]5215e1bd-f599-4f07-f7d3-cba4b1e9cba1" [ 546.717695] env[61995]: _type = "Task" [ 546.717695] env[61995]: } to complete. {{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 546.727727] env[61995]: DEBUG oslo_vmware.api [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': session[5258be39-d7e5-2e06-99ec-0419db7e1a3e]5215e1bd-f599-4f07-f7d3-cba4b1e9cba1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 546.892982] env[61995]: DEBUG oslo_concurrency.lockutils [None req-f05e4bca-5463-449c-bcd7-bd61934970a5 tempest-AttachInterfacesTestJSON-1933428740 tempest-AttachInterfacesTestJSON-1933428740-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 547.063005] env[61995]: DEBUG oslo_concurrency.lockutils [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Releasing lock "refresh_cache-b610e51c-f7a4-4e3a-85c1-28603fc82bab" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 547.063252] env[61995]: DEBUG nova.compute.manager [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61995) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3038}} [ 547.063417] env[61995]: DEBUG nova.compute.manager [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] Deallocating network for instance {{(pid=61995) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2298}} [ 547.063599] env[61995]: DEBUG nova.network.neutron [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] deallocate_for_instance() {{(pid=61995) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 547.086439] env[61995]: DEBUG nova.network.neutron [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] Instance cache missing network info. {{(pid=61995) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 547.087036] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Expecting reply to msg 19770f79694a4530a50b4b9089ad3bb5 in queue reply_757213bc08bb49dab178826d88b76f40 [ 547.096256] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 19770f79694a4530a50b4b9089ad3bb5 [ 547.145207] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5ba9365-3150-4de0-9993-05dcadabe6fc {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.152960] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23010e4e-14dc-4687-854e-58d133826067 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.205276] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3db0a709-3330-4da8-8c60-d466bf35b059 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.214770] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12da295c-719f-441b-a918-d663395bfe8b {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.232448] env[61995]: DEBUG nova.compute.provider_tree [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] Inventory has not changed in ProviderTree for provider: c68e615f-c0c6-4278-bb7b-a579e68878a7 {{(pid=61995) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 547.233072] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] Expecting reply to msg ba771dc6b70b44bf8c856bc6658a038d in queue reply_757213bc08bb49dab178826d88b76f40 [ 547.238178] env[61995]: DEBUG oslo_vmware.api [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 
tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': session[5258be39-d7e5-2e06-99ec-0419db7e1a3e]5215e1bd-f599-4f07-f7d3-cba4b1e9cba1, 'name': SearchDatastore_Task, 'duration_secs': 0.027391} completed successfully. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 547.238670] env[61995]: DEBUG oslo_concurrency.lockutils [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Releasing lock "[datastore2] devstack-image-cache_base/947125f0-9664-40eb-953e-b1373b076c9f/947125f0-9664-40eb-953e-b1373b076c9f.vmdk" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 547.239073] env[61995]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/947125f0-9664-40eb-953e-b1373b076c9f/947125f0-9664-40eb-953e-b1373b076c9f.vmdk to [datastore2] 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62/1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62.vmdk {{(pid=61995) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 547.239952] env[61995]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8aeadb8c-0a0d-4c21-9808-28025a3b7382 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.243684] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ba771dc6b70b44bf8c856bc6658a038d [ 547.246606] env[61995]: DEBUG oslo_vmware.api [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Waiting for the task: (returnval){ [ 547.246606] env[61995]: value = "task-378088" [ 547.246606] env[61995]: _type = "Task" [ 547.246606] env[61995]: } to complete. {{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 547.254911] env[61995]: DEBUG oslo_vmware.api [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': task-378088, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 547.493740] env[61995]: DEBUG oslo_concurrency.lockutils [None req-182cf4eb-3c80-4ae3-ba78-5058296d1da7 tempest-ServersAdminTestJSON-252110596 tempest-ServersAdminTestJSON-252110596-project-member] Acquiring lock "eaa96051-2a7e-4b80-a88b-5bb4faccc8c4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 547.494043] env[61995]: DEBUG oslo_concurrency.lockutils [None req-182cf4eb-3c80-4ae3-ba78-5058296d1da7 tempest-ServersAdminTestJSON-252110596 tempest-ServersAdminTestJSON-252110596-project-member] Lock "eaa96051-2a7e-4b80-a88b-5bb4faccc8c4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 547.591552] env[61995]: DEBUG nova.network.neutron [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] Updating instance_info_cache with network_info: [] {{(pid=61995) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 547.592264] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Expecting reply to msg 759c998f388f46a0b9b8ff2710a39876 in queue reply_757213bc08bb49dab178826d88b76f40 [ 547.602506] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 759c998f388f46a0b9b8ff2710a39876 [ 547.733781] env[61995]: ERROR nova.compute.manager [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 3a82a148-8617-45cb-95d6-514d987604e1, please check neutron logs for more information. 
[ 547.733781] env[61995]: ERROR nova.compute.manager Traceback (most recent call last): [ 547.733781] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1998, in _allocate_network_async [ 547.733781] env[61995]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 547.733781] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 547.733781] env[61995]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 547.733781] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 547.733781] env[61995]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 547.733781] env[61995]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 547.733781] env[61995]: ERROR nova.compute.manager self.force_reraise() [ 547.733781] env[61995]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 547.733781] env[61995]: ERROR nova.compute.manager raise self.value [ 547.733781] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 547.733781] env[61995]: ERROR nova.compute.manager updated_port = self._update_port( [ 547.733781] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 547.733781] env[61995]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 547.734775] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 547.734775] env[61995]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 547.734775] env[61995]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 3a82a148-8617-45cb-95d6-514d987604e1, please check neutron logs for more information. 
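The traceback above ends in nova/network/neutron.py's _ensure_no_port_binding_failure raising nova.exception.PortBindingFailed for port 3a82a148-8617-45cb-95d6-514d987604e1; that is the point where a Neutron-side binding failure becomes the build failures logged before and after this burst. A minimal, self-contained sketch of that check, assuming a plain port dict and using 'binding_failed' as the failure sentinel (illustrative names only, not Nova's actual implementation), looks like this:

    # Illustrative sketch only: turning a Neutron port update result into the
    # PortBindingFailed error seen in this log. The exception class, helper
    # name, and the 'binding_failed' sentinel are assumptions for illustration.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")
            self.port_id = port_id

    def ensure_no_port_binding_failure(port):
        """Raise if Neutron reports the port's binding as failed."""
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    # Example: a port whose binding Neutron could not complete.
    failed_port = {'id': '3a82a148-8617-45cb-95d6-514d987604e1',
                   'binding:vif_type': 'binding_failed'}
    try:
        ensure_no_port_binding_failure(failed_port)
    except PortBindingFailed as exc:
        print(exc)  # Binding failed for port 3a82a148-..., please check neutron logs ...

The "Instance failed to spawn" entries that follow are this same exception resurfacing when vmwareapi.vif.get_vif_info iterates the instance's network_info during the spawn of b819920a-cc74-4718-a054-e81affabcd5b.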
[ 547.734775] env[61995]: ERROR nova.compute.manager [ 547.734775] env[61995]: Traceback (most recent call last): [ 547.734775] env[61995]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 547.734775] env[61995]: listener.cb(fileno) [ 547.734775] env[61995]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 547.734775] env[61995]: result = function(*args, **kwargs) [ 547.734775] env[61995]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 547.734775] env[61995]: return func(*args, **kwargs) [ 547.734775] env[61995]: File "/opt/stack/nova/nova/compute/manager.py", line 2020, in _allocate_network_async [ 547.734775] env[61995]: raise e [ 547.734775] env[61995]: File "/opt/stack/nova/nova/compute/manager.py", line 1998, in _allocate_network_async [ 547.734775] env[61995]: nwinfo = self.network_api.allocate_for_instance( [ 547.734775] env[61995]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 547.734775] env[61995]: created_port_ids = self._update_ports_for_instance( [ 547.734775] env[61995]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 547.734775] env[61995]: with excutils.save_and_reraise_exception(): [ 547.734775] env[61995]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 547.734775] env[61995]: self.force_reraise() [ 547.734775] env[61995]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 547.734775] env[61995]: raise self.value [ 547.734775] env[61995]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 547.734775] env[61995]: updated_port = self._update_port( [ 547.734775] env[61995]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 547.734775] env[61995]: _ensure_no_port_binding_failure(port) [ 547.734775] env[61995]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 547.734775] env[61995]: raise exception.PortBindingFailed(port_id=port['id']) [ 547.735677] env[61995]: nova.exception.PortBindingFailed: Binding failed for port 3a82a148-8617-45cb-95d6-514d987604e1, please check neutron logs for more information. [ 547.735677] env[61995]: Removing descriptor: 15 [ 547.735677] env[61995]: ERROR nova.compute.manager [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] [instance: b819920a-cc74-4718-a054-e81affabcd5b] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 3a82a148-8617-45cb-95d6-514d987604e1, please check neutron logs for more information. 
[ 547.735677] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] Traceback (most recent call last): [ 547.735677] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] File "/opt/stack/nova/nova/compute/manager.py", line 2903, in _build_resources [ 547.735677] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] yield resources [ 547.735677] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] File "/opt/stack/nova/nova/compute/manager.py", line 2650, in _build_and_run_instance [ 547.735677] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] self.driver.spawn(context, instance, image_meta, [ 547.735677] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 547.735677] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 547.735677] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 547.735677] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] vm_ref = self.build_virtual_machine(instance, [ 547.736147] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 547.736147] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] vif_infos = vmwarevif.get_vif_info(self._session, [ 547.736147] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 547.736147] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] for vif in network_info: [ 547.736147] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 547.736147] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] return self._sync_wrapper(fn, *args, **kwargs) [ 547.736147] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 547.736147] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] self.wait() [ 547.736147] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 547.736147] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] self[:] = self._gt.wait() [ 547.736147] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 547.736147] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] return self._exit_event.wait() [ 547.736147] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 547.741937] env[61995]: ERROR 
nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] result = hub.switch() [ 547.741937] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 547.741937] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] return self.greenlet.switch() [ 547.741937] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 547.741937] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] result = function(*args, **kwargs) [ 547.741937] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 547.741937] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] return func(*args, **kwargs) [ 547.741937] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] File "/opt/stack/nova/nova/compute/manager.py", line 2020, in _allocate_network_async [ 547.741937] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] raise e [ 547.741937] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] File "/opt/stack/nova/nova/compute/manager.py", line 1998, in _allocate_network_async [ 547.741937] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] nwinfo = self.network_api.allocate_for_instance( [ 547.741937] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 547.741937] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] created_port_ids = self._update_ports_for_instance( [ 547.742403] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 547.742403] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] with excutils.save_and_reraise_exception(): [ 547.742403] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 547.742403] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] self.force_reraise() [ 547.742403] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 547.742403] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] raise self.value [ 547.742403] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 547.742403] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] updated_port = self._update_port( [ 547.742403] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 547.742403] 
env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] _ensure_no_port_binding_failure(port) [ 547.742403] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 547.742403] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] raise exception.PortBindingFailed(port_id=port['id']) [ 547.742746] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] nova.exception.PortBindingFailed: Binding failed for port 3a82a148-8617-45cb-95d6-514d987604e1, please check neutron logs for more information. [ 547.742746] env[61995]: ERROR nova.compute.manager [instance: b819920a-cc74-4718-a054-e81affabcd5b] [ 547.742746] env[61995]: INFO nova.compute.manager [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] [instance: b819920a-cc74-4718-a054-e81affabcd5b] Terminating instance [ 547.742746] env[61995]: DEBUG oslo_concurrency.lockutils [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] Acquiring lock "refresh_cache-b819920a-cc74-4718-a054-e81affabcd5b" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 547.742746] env[61995]: DEBUG oslo_concurrency.lockutils [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] Acquired lock "refresh_cache-b819920a-cc74-4718-a054-e81affabcd5b" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 547.742746] env[61995]: DEBUG nova.network.neutron [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] [instance: b819920a-cc74-4718-a054-e81affabcd5b] Building network info cache for instance {{(pid=61995) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 547.742981] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] Expecting reply to msg 76c5715a652145869145209da0280150 in queue reply_757213bc08bb49dab178826d88b76f40 [ 547.742981] env[61995]: DEBUG nova.scheduler.client.report [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] Inventory has not changed for provider c68e615f-c0c6-4278-bb7b-a579e68878a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61995) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 547.742981] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] Expecting reply to msg 91fad3cea3834ee79269090e1e774919 in 
queue reply_757213bc08bb49dab178826d88b76f40 [ 547.751599] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 76c5715a652145869145209da0280150 [ 547.760727] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 91fad3cea3834ee79269090e1e774919 [ 547.761277] env[61995]: DEBUG oslo_vmware.api [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': task-378088, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.501811} completed successfully. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 547.761514] env[61995]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/947125f0-9664-40eb-953e-b1373b076c9f/947125f0-9664-40eb-953e-b1373b076c9f.vmdk to [datastore2] 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62/1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62.vmdk {{(pid=61995) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 547.762492] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Extending root virtual disk to 1048576 {{(pid=61995) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 547.763034] env[61995]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-36de90c6-a29d-4eac-a1d6-d7737014fa9b {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.779371] env[61995]: DEBUG oslo_vmware.api [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Waiting for the task: (returnval){ [ 547.779371] env[61995]: value = "task-378089" [ 547.779371] env[61995]: _type = "Task" [ 547.779371] env[61995]: } to complete. {{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 547.788881] env[61995]: DEBUG oslo_vmware.api [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': task-378089, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 548.096328] env[61995]: INFO nova.compute.manager [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] [instance: b610e51c-f7a4-4e3a-85c1-28603fc82bab] Took 1.03 seconds to deallocate network for instance. 
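The CopyVirtualDisk_Task and ExtendVirtualDisk_Task entries above follow the usual oslo.vmware pattern: start a vCenter task with invoke_api() and block on wait_for_task() while _poll_task reports progress. A hedged sketch of that pattern for the disk extend (host, credentials, datastore path and the datacenter reference are placeholders; parameter names follow the vSphere SDK rather than anything copied from this log):

from oslo_vmware import api

session = api.VMwareAPISession('vc.example.org', 'user', 'password',
                               api_retry_count=10, task_poll_interval=0.5)
disk_mgr = session.vim.service_content.virtualDiskManager
dc_ref = None  # placeholder: managed object reference of the datacenter
task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
                          name='[datastore2] example/example.vmdk',
                          datacenter=dc_ref,
                          newCapacityKb=1048576,   # the 1048576 KB target seen in the log
                          eagerZero=False)
session.wait_for_task(task)  # polls until the task completes, like task-378089 above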
[ 548.098134] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Expecting reply to msg 55b8331f7cb848799a74360b28f0eefd in queue reply_757213bc08bb49dab178826d88b76f40 [ 548.147448] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 55b8331f7cb848799a74360b28f0eefd [ 548.246035] env[61995]: DEBUG oslo_concurrency.lockutils [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.352s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 548.246420] env[61995]: DEBUG nova.compute.manager [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] Start building networks asynchronously for instance. {{(pid=61995) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 548.248108] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] Expecting reply to msg 992b6671b0944a2fb3662d8bba68ac8a in queue reply_757213bc08bb49dab178826d88b76f40 [ 548.249335] env[61995]: DEBUG oslo_concurrency.lockutils [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 9.296s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 548.251080] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Expecting reply to msg 77a2c85085f44324ad97d6062a6bf7c3 in queue reply_757213bc08bb49dab178826d88b76f40 [ 548.279443] env[61995]: DEBUG nova.network.neutron [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] [instance: b819920a-cc74-4718-a054-e81affabcd5b] Instance cache missing network info. {{(pid=61995) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 548.290987] env[61995]: DEBUG oslo_vmware.api [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': task-378089, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06742} completed successfully. 
{{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 548.293273] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Extended root virtual disk {{(pid=61995) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 548.293273] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-072c3ca1-9cd6-4da1-9f58-87a2681f7a1f {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.296555] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 992b6671b0944a2fb3662d8bba68ac8a [ 548.323516] env[61995]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Reconfiguring VM instance instance-00000001 to attach disk [datastore2] 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62/1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62.vmdk or device None with type sparse {{(pid=61995) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 548.324283] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 77a2c85085f44324ad97d6062a6bf7c3 [ 548.324781] env[61995]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ae9d3730-d4e5-45ba-8b49-55f6d56049d2 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.349731] env[61995]: DEBUG oslo_vmware.api [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Waiting for the task: (returnval){ [ 548.349731] env[61995]: value = "task-378090" [ 548.349731] env[61995]: _type = "Task" [ 548.349731] env[61995]: } to complete. {{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 548.361351] env[61995]: DEBUG oslo_vmware.api [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': task-378090, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 548.394514] env[61995]: DEBUG nova.network.neutron [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] [instance: b819920a-cc74-4718-a054-e81affabcd5b] Updating instance_info_cache with network_info: [] {{(pid=61995) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 548.395102] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] Expecting reply to msg 21a9d559ad124eaab9d048b12261231f in queue reply_757213bc08bb49dab178826d88b76f40 [ 548.405356] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 21a9d559ad124eaab9d048b12261231f [ 548.606243] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Expecting reply to msg 32f18ff4cadc42bb84411e70ec5873ef in queue reply_757213bc08bb49dab178826d88b76f40 [ 548.648086] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 32f18ff4cadc42bb84411e70ec5873ef [ 548.715134] env[61995]: DEBUG nova.compute.manager [req-b007ec48-9847-4aa4-acaa-035ddd563f09 req-e3f1a1bb-d362-482f-89c9-f3ad124731d3 service nova] [instance: b819920a-cc74-4718-a054-e81affabcd5b] Received event network-changed-3a82a148-8617-45cb-95d6-514d987604e1 {{(pid=61995) external_instance_event /opt/stack/nova/nova/compute/manager.py:11144}} [ 548.715349] env[61995]: DEBUG nova.compute.manager [req-b007ec48-9847-4aa4-acaa-035ddd563f09 req-e3f1a1bb-d362-482f-89c9-f3ad124731d3 service nova] [instance: b819920a-cc74-4718-a054-e81affabcd5b] Refreshing instance network info cache due to event network-changed-3a82a148-8617-45cb-95d6-514d987604e1. {{(pid=61995) external_instance_event /opt/stack/nova/nova/compute/manager.py:11149}} [ 548.715537] env[61995]: DEBUG oslo_concurrency.lockutils [req-b007ec48-9847-4aa4-acaa-035ddd563f09 req-e3f1a1bb-d362-482f-89c9-f3ad124731d3 service nova] Acquiring lock "refresh_cache-b819920a-cc74-4718-a054-e81affabcd5b" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 548.754555] env[61995]: DEBUG nova.compute.utils [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] Using /dev/sd instead of None {{(pid=61995) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 548.755344] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] Expecting reply to msg 4100a12f9a6a486d8c76099e7a5dfe92 in queue reply_757213bc08bb49dab178826d88b76f40 [ 548.760104] env[61995]: DEBUG nova.compute.manager [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] Allocating IP information in the background. 
{{(pid=61995) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1990}} [ 548.760284] env[61995]: DEBUG nova.network.neutron [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] allocate_for_instance() {{(pid=61995) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 548.765205] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4100a12f9a6a486d8c76099e7a5dfe92 [ 548.814218] env[61995]: DEBUG nova.policy [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2a189e739e2545a99d2ae999e02eda70', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9dc9f3a352e74260a46df71823ed40f5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61995) authorize /opt/stack/nova/nova/policy.py:203}} [ 548.863397] env[61995]: DEBUG oslo_vmware.api [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': task-378090, 'name': ReconfigVM_Task, 'duration_secs': 0.311398} completed successfully. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 548.863740] env[61995]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Reconfigured VM instance instance-00000001 to attach disk [datastore2] 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62/1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62.vmdk or device None with type sparse {{(pid=61995) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 548.864296] env[61995]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-08f0405b-8ce7-4e32-b42b-75ba20adfdd7 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.871049] env[61995]: DEBUG oslo_vmware.api [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Waiting for the task: (returnval){ [ 548.871049] env[61995]: value = "task-378091" [ 548.871049] env[61995]: _type = "Task" [ 548.871049] env[61995]: } to complete. {{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 548.882202] env[61995]: DEBUG oslo_vmware.api [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': task-378091, 'name': Rename_Task} progress is 5%. 
{{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 548.897174] env[61995]: DEBUG oslo_concurrency.lockutils [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] Releasing lock "refresh_cache-b819920a-cc74-4718-a054-e81affabcd5b" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 548.897701] env[61995]: DEBUG nova.compute.manager [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] [instance: b819920a-cc74-4718-a054-e81affabcd5b] Start destroying the instance on the hypervisor. {{(pid=61995) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 548.903285] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] [instance: b819920a-cc74-4718-a054-e81affabcd5b] Destroying instance {{(pid=61995) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 548.904133] env[61995]: DEBUG oslo_concurrency.lockutils [req-b007ec48-9847-4aa4-acaa-035ddd563f09 req-e3f1a1bb-d362-482f-89c9-f3ad124731d3 service nova] Acquired lock "refresh_cache-b819920a-cc74-4718-a054-e81affabcd5b" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 548.904510] env[61995]: DEBUG nova.network.neutron [req-b007ec48-9847-4aa4-acaa-035ddd563f09 req-e3f1a1bb-d362-482f-89c9-f3ad124731d3 service nova] [instance: b819920a-cc74-4718-a054-e81affabcd5b] Refreshing network info cache for port 3a82a148-8617-45cb-95d6-514d987604e1 {{(pid=61995) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 548.905089] env[61995]: INFO oslo_messaging._drivers.amqpdriver [req-b007ec48-9847-4aa4-acaa-035ddd563f09 req-e3f1a1bb-d362-482f-89c9-f3ad124731d3 service nova] Expecting reply to msg 48d4a49ae7724c12b569695d3c605e47 in queue reply_757213bc08bb49dab178826d88b76f40 [ 548.906297] env[61995]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0503de04-6e0f-4606-86a6-5d104690a4a3 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.916200] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e852b65-fedc-46c3-896a-645e2ff1be39 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.930493] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 48d4a49ae7724c12b569695d3c605e47 [ 548.946552] env[61995]: WARNING nova.virt.vmwareapi.vmops [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] [instance: b819920a-cc74-4718-a054-e81affabcd5b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b819920a-cc74-4718-a054-e81affabcd5b could not be found. 
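The Acquiring/Acquired/Releasing lines around the "refresh_cache-b819920a-..." lock come from oslo.concurrency's lock helpers, which Nova uses to serialize work on an instance's network info cache. A minimal sketch of that pattern (lock bodies are placeholders, not Nova's cache-refresh or resource-tracker code):

from oslo_concurrency import lockutils

instance_uuid = 'b819920a-cc74-4718-a054-e81affabcd5b'

with lockutils.lock('refresh_cache-%s' % instance_uuid):
    # placeholder: rebuild the cached network_info for the instance here
    pass

@lockutils.synchronized('compute_resources')   # decorator form, matching the
def abort_instance_claim():                    # "compute_resources" lock in this log
    pass                                       # placeholder body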
[ 548.946804] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] [instance: b819920a-cc74-4718-a054-e81affabcd5b] Instance destroyed {{(pid=61995) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 548.946979] env[61995]: INFO nova.compute.manager [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] [instance: b819920a-cc74-4718-a054-e81affabcd5b] Took 0.05 seconds to destroy the instance on the hypervisor. [ 548.947226] env[61995]: DEBUG oslo.service.loopingcall [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61995) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 548.950751] env[61995]: DEBUG nova.compute.manager [-] [instance: b819920a-cc74-4718-a054-e81affabcd5b] Deallocating network for instance {{(pid=61995) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2298}} [ 548.950869] env[61995]: DEBUG nova.network.neutron [-] [instance: b819920a-cc74-4718-a054-e81affabcd5b] deallocate_for_instance() {{(pid=61995) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 548.979239] env[61995]: DEBUG nova.network.neutron [-] [instance: b819920a-cc74-4718-a054-e81affabcd5b] Instance cache missing network info. {{(pid=61995) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 548.979804] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 4dfed2f2236947a48b974207d75b8d0b in queue reply_757213bc08bb49dab178826d88b76f40 [ 548.999095] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4dfed2f2236947a48b974207d75b8d0b [ 549.016782] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a5a6b15-1788-4a1c-93a5-de63a1cba4a3 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.026289] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be7a2a70-2c4e-4ca5-99eb-636c15804e46 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.068306] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26af29dc-4c18-41df-98bf-ce2dc4c35d14 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.076488] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5783f694-5043-4f54-9e1d-b9d233a042a3 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.091179] env[61995]: DEBUG nova.compute.provider_tree [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Inventory has not changed in ProviderTree for provider: c68e615f-c0c6-4278-bb7b-a579e68878a7 {{(pid=61995) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 549.091784] env[61995]: INFO 
oslo_messaging._drivers.amqpdriver [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Expecting reply to msg f564cde695384258be82b5f9dbd38ebc in queue reply_757213bc08bb49dab178826d88b76f40 [ 549.099087] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f564cde695384258be82b5f9dbd38ebc [ 549.130971] env[61995]: INFO nova.scheduler.client.report [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Deleted allocations for instance b610e51c-f7a4-4e3a-85c1-28603fc82bab [ 549.136488] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Expecting reply to msg 7d6051d311954d95b5640617bec73cd6 in queue reply_757213bc08bb49dab178826d88b76f40 [ 549.156386] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7d6051d311954d95b5640617bec73cd6 [ 549.257864] env[61995]: DEBUG nova.compute.manager [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] Start building block device mappings for instance. {{(pid=61995) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 549.259720] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] Expecting reply to msg 603a69bd1d5b430c8f7bf89ad517ac53 in queue reply_757213bc08bb49dab178826d88b76f40 [ 549.291653] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 603a69bd1d5b430c8f7bf89ad517ac53 [ 549.383913] env[61995]: DEBUG oslo_vmware.api [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': task-378091, 'name': Rename_Task, 'duration_secs': 0.144808} completed successfully. 
{{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 549.384323] env[61995]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Powering on the VM {{(pid=61995) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 549.384494] env[61995]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3da454a9-e6b6-4cce-a08b-c1babf40272b {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.390177] env[61995]: DEBUG nova.network.neutron [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] Successfully created port: c2d91fee-658c-42ae-9503-e12f3995ca7e {{(pid=61995) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 549.393411] env[61995]: DEBUG oslo_vmware.api [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Waiting for the task: (returnval){ [ 549.393411] env[61995]: value = "task-378092" [ 549.393411] env[61995]: _type = "Task" [ 549.393411] env[61995]: } to complete. {{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 549.401296] env[61995]: DEBUG oslo_vmware.api [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': task-378092, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 549.440076] env[61995]: DEBUG nova.network.neutron [req-b007ec48-9847-4aa4-acaa-035ddd563f09 req-e3f1a1bb-d362-482f-89c9-f3ad124731d3 service nova] [instance: b819920a-cc74-4718-a054-e81affabcd5b] Instance cache missing network info. 
{{(pid=61995) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 549.481753] env[61995]: DEBUG nova.network.neutron [-] [instance: b819920a-cc74-4718-a054-e81affabcd5b] Updating instance_info_cache with network_info: [] {{(pid=61995) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 549.482263] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg a99b16e74a38487b8f85534f425eeaed in queue reply_757213bc08bb49dab178826d88b76f40 [ 549.494946] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a99b16e74a38487b8f85534f425eeaed [ 549.595632] env[61995]: DEBUG nova.scheduler.client.report [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Inventory has not changed for provider c68e615f-c0c6-4278-bb7b-a579e68878a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61995) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 549.598116] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Expecting reply to msg 25df924bb5834c00ba814d8bfc067d57 in queue reply_757213bc08bb49dab178826d88b76f40 [ 549.621052] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 25df924bb5834c00ba814d8bfc067d57 [ 549.633169] env[61995]: DEBUG nova.compute.manager [None req-39158509-88b0-4b72-b965-6c7bae80a1ca tempest-ServerDiagnosticsV248Test-1964762263 tempest-ServerDiagnosticsV248Test-1964762263-project-admin] [instance: 5527576a-d56d-42c3-a7f7-02c66c0d1b3d] Checking state {{(pid=61995) _get_power_state /opt/stack/nova/nova/compute/manager.py:1800}} [ 549.634257] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f109087-01df-4b18-b700-85f209ef2afa {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.640954] env[61995]: DEBUG nova.network.neutron [req-b007ec48-9847-4aa4-acaa-035ddd563f09 req-e3f1a1bb-d362-482f-89c9-f3ad124731d3 service nova] [instance: b819920a-cc74-4718-a054-e81affabcd5b] Updating instance_info_cache with network_info: [] {{(pid=61995) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 549.640954] env[61995]: INFO oslo_messaging._drivers.amqpdriver [req-b007ec48-9847-4aa4-acaa-035ddd563f09 req-e3f1a1bb-d362-482f-89c9-f3ad124731d3 service nova] Expecting reply to msg befbc78325fe4c33bd8a4d64f2da46cb in queue reply_757213bc08bb49dab178826d88b76f40 [ 549.643793] env[61995]: INFO nova.compute.manager [None req-39158509-88b0-4b72-b965-6c7bae80a1ca tempest-ServerDiagnosticsV248Test-1964762263 tempest-ServerDiagnosticsV248Test-1964762263-project-admin] [instance: 5527576a-d56d-42c3-a7f7-02c66c0d1b3d] Retrieving diagnostics [ 549.646394] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81fbaf53-4285-494f-936d-f4e7577fa082 {{(pid=61995) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.678234] env[61995]: DEBUG oslo_concurrency.lockutils [None req-ea640fa2-eaf2-48c6-9579-d88038be7ec7 tempest-MigrationsAdminTest-1704279807 tempest-MigrationsAdminTest-1704279807-project-member] Lock "b610e51c-f7a4-4e3a-85c1-28603fc82bab" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.286s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 549.679626] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg befbc78325fe4c33bd8a4d64f2da46cb [ 549.682505] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-182cf4eb-3c80-4ae3-ba78-5058296d1da7 tempest-ServersAdminTestJSON-252110596 tempest-ServersAdminTestJSON-252110596-project-member] Expecting reply to msg d931d9a39297459cbec051d94220afb3 in queue reply_757213bc08bb49dab178826d88b76f40 [ 549.693971] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d931d9a39297459cbec051d94220afb3 [ 549.765623] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] Expecting reply to msg 3100c4a45ba84bdcb262c9d391afcb21 in queue reply_757213bc08bb49dab178826d88b76f40 [ 549.827913] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3100c4a45ba84bdcb262c9d391afcb21 [ 549.903641] env[61995]: DEBUG oslo_vmware.api [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Task: {'id': task-378092, 'name': PowerOnVM_Task, 'duration_secs': 0.439734} completed successfully. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 549.903927] env[61995]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Powered on the VM {{(pid=61995) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 549.904193] env[61995]: DEBUG nova.compute.manager [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Checking state {{(pid=61995) _get_power_state /opt/stack/nova/nova/compute/manager.py:1800}} [ 549.904983] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afa538e7-da72-4581-b469-f7e6c82961cd {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.912699] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Expecting reply to msg e65e71c527bd42e6a54e35c307108543 in queue reply_757213bc08bb49dab178826d88b76f40 [ 549.984902] env[61995]: INFO nova.compute.manager [-] [instance: b819920a-cc74-4718-a054-e81affabcd5b] Took 1.03 seconds to deallocate network for instance. 
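For reference, the inventory reported for provider c68e615f-c0c6-4278-bb7b-a579e68878a7 in the scheduler report lines above translates into the following effective capacities, assuming placement's usual capacity formula (total - reserved) * allocation_ratio:

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0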
[ 549.987719] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e65e71c527bd42e6a54e35c307108543 [ 549.988338] env[61995]: DEBUG nova.compute.claims [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] [instance: b819920a-cc74-4718-a054-e81affabcd5b] Aborting claim: {{(pid=61995) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 549.988508] env[61995]: DEBUG oslo_concurrency.lockutils [None req-eb6c806e-954e-45e9-85aa-88d75b833fb0 tempest-AttachVolumeTestJSON-1605116632 tempest-AttachVolumeTestJSON-1605116632-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 550.104136] env[61995]: DEBUG oslo_concurrency.lockutils [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.853s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 550.104136] env[61995]: ERROR nova.compute.manager [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 44c93bf1-8e65-4c5c-bbf7-fa9b74625351, please check neutron logs for more information. [ 550.104136] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] Traceback (most recent call last): [ 550.104136] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] File "/opt/stack/nova/nova/compute/manager.py", line 2650, in _build_and_run_instance [ 550.104136] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] self.driver.spawn(context, instance, image_meta, [ 550.104136] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 550.104136] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] self._vmops.spawn(context, instance, image_meta, injected_files, [ 550.104136] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 550.104136] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] vm_ref = self.build_virtual_machine(instance, [ 550.104528] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 550.104528] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] vif_infos = vmwarevif.get_vif_info(self._session, [ 550.104528] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 550.104528] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] for vif in network_info: [ 550.104528] env[61995]: ERROR 
nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 550.104528] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] return self._sync_wrapper(fn, *args, **kwargs) [ 550.104528] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 550.104528] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] self.wait() [ 550.104528] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 550.104528] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] self[:] = self._gt.wait() [ 550.104528] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 550.104528] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] return self._exit_event.wait() [ 550.104528] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 550.104931] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] result = hub.switch() [ 550.104931] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 550.104931] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] return self.greenlet.switch() [ 550.104931] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 550.104931] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] result = function(*args, **kwargs) [ 550.104931] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 550.104931] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] return func(*args, **kwargs) [ 550.104931] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] File "/opt/stack/nova/nova/compute/manager.py", line 2020, in _allocate_network_async [ 550.104931] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] raise e [ 550.104931] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] File "/opt/stack/nova/nova/compute/manager.py", line 1998, in _allocate_network_async [ 550.104931] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] nwinfo = self.network_api.allocate_for_instance( [ 550.104931] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 550.104931] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] created_port_ids = self._update_ports_for_instance( [ 550.105294] env[61995]: ERROR nova.compute.manager [instance: 
2a07c11a-4e95-47db-bf50-5ad720403faa] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 550.105294] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] with excutils.save_and_reraise_exception(): [ 550.105294] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 550.105294] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] self.force_reraise() [ 550.105294] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 550.105294] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] raise self.value [ 550.105294] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 550.105294] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] updated_port = self._update_port( [ 550.105294] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 550.105294] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] _ensure_no_port_binding_failure(port) [ 550.105294] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 550.105294] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] raise exception.PortBindingFailed(port_id=port['id']) [ 550.105594] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] nova.exception.PortBindingFailed: Binding failed for port 44c93bf1-8e65-4c5c-bbf7-fa9b74625351, please check neutron logs for more information. [ 550.105594] env[61995]: ERROR nova.compute.manager [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] [ 550.105594] env[61995]: DEBUG nova.compute.utils [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] Binding failed for port 44c93bf1-8e65-4c5c-bbf7-fa9b74625351, please check neutron logs for more information. 
{{(pid=61995) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 550.105594] env[61995]: DEBUG oslo_concurrency.lockutils [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.578s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 550.106179] env[61995]: INFO nova.compute.claims [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 550.110611] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] Expecting reply to msg f49e13036b4c496e8582beeafabd97d5 in queue reply_757213bc08bb49dab178826d88b76f40 [ 550.123857] env[61995]: DEBUG nova.compute.manager [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] Build of instance 2a07c11a-4e95-47db-bf50-5ad720403faa was re-scheduled: Binding failed for port 44c93bf1-8e65-4c5c-bbf7-fa9b74625351, please check neutron logs for more information. {{(pid=61995) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2489}} [ 550.124489] env[61995]: DEBUG nova.compute.manager [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] Unplugging VIFs for instance {{(pid=61995) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3015}} [ 550.124740] env[61995]: DEBUG oslo_concurrency.lockutils [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Acquiring lock "refresh_cache-2a07c11a-4e95-47db-bf50-5ad720403faa" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 550.125316] env[61995]: DEBUG oslo_concurrency.lockutils [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Acquired lock "refresh_cache-2a07c11a-4e95-47db-bf50-5ad720403faa" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 550.125316] env[61995]: DEBUG nova.network.neutron [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] Building network info cache for instance {{(pid=61995) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 550.125463] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Expecting reply to msg 473ed75ce9874afca7db47091b2160c5 in queue reply_757213bc08bb49dab178826d88b76f40 [ 550.150381] env[61995]: DEBUG 
oslo_concurrency.lockutils [req-b007ec48-9847-4aa4-acaa-035ddd563f09 req-e3f1a1bb-d362-482f-89c9-f3ad124731d3 service nova] Releasing lock "refresh_cache-b819920a-cc74-4718-a054-e81affabcd5b" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 550.156578] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 473ed75ce9874afca7db47091b2160c5 [ 550.173547] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f49e13036b4c496e8582beeafabd97d5 [ 550.187017] env[61995]: DEBUG nova.compute.manager [None req-182cf4eb-3c80-4ae3-ba78-5058296d1da7 tempest-ServersAdminTestJSON-252110596 tempest-ServersAdminTestJSON-252110596-project-member] [instance: eaa96051-2a7e-4b80-a88b-5bb4faccc8c4] Starting instance... {{(pid=61995) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 550.187017] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-182cf4eb-3c80-4ae3-ba78-5058296d1da7 tempest-ServersAdminTestJSON-252110596 tempest-ServersAdminTestJSON-252110596-project-member] Expecting reply to msg f76591c03287420dac98b2ceb90dfef9 in queue reply_757213bc08bb49dab178826d88b76f40 [ 550.224111] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f76591c03287420dac98b2ceb90dfef9 [ 550.271491] env[61995]: DEBUG nova.compute.manager [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] Start spawning the instance on the hypervisor. {{(pid=61995) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 550.316149] env[61995]: DEBUG nova.virt.hardware [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-06T07:57:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-06T07:57:11Z,direct_url=,disk_format='vmdk',id=947125f0-9664-40eb-953e-b1373b076c9f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3dc1e6434ef44fd0be01fd4ebf2b62ef',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-06T07:57:12Z,virtual_size=,visibility=), allow threads: False {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 550.316416] env[61995]: DEBUG nova.virt.hardware [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] Flavor limits 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 550.316563] env[61995]: DEBUG nova.virt.hardware [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] Image limits 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 550.316736] env[61995]: DEBUG nova.virt.hardware [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 
tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] Flavor pref 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 550.316870] env[61995]: DEBUG nova.virt.hardware [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] Image pref 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 550.317006] env[61995]: DEBUG nova.virt.hardware [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 550.317209] env[61995]: DEBUG nova.virt.hardware [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 550.317362] env[61995]: DEBUG nova.virt.hardware [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61995) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 550.317609] env[61995]: DEBUG nova.virt.hardware [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] Got 1 possible topologies {{(pid=61995) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 550.317699] env[61995]: DEBUG nova.virt.hardware [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 550.317815] env[61995]: DEBUG nova.virt.hardware [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 550.318681] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-168c7cf8-0764-4010-b92e-61ce4b9036f1 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.326757] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60454ff2-787a-4209-a69a-67dcc16d89e0 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.423058] env[61995]: DEBUG oslo_concurrency.lockutils [None req-c3b17939-55d5-4305-be56-3fe7a96f5881 tempest-ServersAdmin275Test-1777179720 tempest-ServersAdmin275Test-1777179720-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61995) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 550.625612] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] Expecting reply to msg fc277778a4c141aaa448564269741318 in queue reply_757213bc08bb49dab178826d88b76f40 [ 550.634337] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fc277778a4c141aaa448564269741318 [ 550.704042] env[61995]: DEBUG nova.network.neutron [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] Instance cache missing network info. {{(pid=61995) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 550.717580] env[61995]: DEBUG oslo_concurrency.lockutils [None req-182cf4eb-3c80-4ae3-ba78-5058296d1da7 tempest-ServersAdminTestJSON-252110596 tempest-ServersAdminTestJSON-252110596-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.008963] env[61995]: DEBUG nova.network.neutron [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] Updating instance_info_cache with network_info: [] {{(pid=61995) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 551.009503] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Expecting reply to msg 01130c1358d246a1a602efc49021e99f in queue reply_757213bc08bb49dab178826d88b76f40 [ 551.018627] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 01130c1358d246a1a602efc49021e99f [ 551.232180] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-9e02e760-5bb2-4f5c-92cd-9104f9d2f814 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Expecting reply to msg 1e5279a6c68e4793b0e634d44b0a5595 in queue reply_757213bc08bb49dab178826d88b76f40 [ 551.244286] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1e5279a6c68e4793b0e634d44b0a5595 [ 551.328923] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1362ff4-9ae9-470a-b929-c2bec0348282 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.337188] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aba4ea17-2903-4488-b6ea-d5a3d970979b {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.371922] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ba4cffe-d60a-4a2c-aa37-30f399501c01 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.380093] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-3a13267e-19d1-4c1d-b895-a53f100e18f1 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.395000] env[61995]: DEBUG nova.compute.provider_tree [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] Inventory has not changed in ProviderTree for provider: c68e615f-c0c6-4278-bb7b-a579e68878a7 {{(pid=61995) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 551.395861] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] Expecting reply to msg 6f818d9094904fe0a20801f173fff140 in queue reply_757213bc08bb49dab178826d88b76f40 [ 551.403046] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6f818d9094904fe0a20801f173fff140 [ 551.513821] env[61995]: DEBUG oslo_concurrency.lockutils [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Releasing lock "refresh_cache-2a07c11a-4e95-47db-bf50-5ad720403faa" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 551.514109] env[61995]: DEBUG nova.compute.manager [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61995) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3038}} [ 551.514275] env[61995]: DEBUG nova.compute.manager [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] Deallocating network for instance {{(pid=61995) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2298}} [ 551.514436] env[61995]: DEBUG nova.network.neutron [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] deallocate_for_instance() {{(pid=61995) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 551.660720] env[61995]: DEBUG nova.network.neutron [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] Instance cache missing network info. 
{{(pid=61995) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 551.661389] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Expecting reply to msg 719e344738c44937a339320f3d8a5dbe in queue reply_757213bc08bb49dab178826d88b76f40 [ 551.671725] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 719e344738c44937a339320f3d8a5dbe [ 551.743847] env[61995]: DEBUG oslo_concurrency.lockutils [None req-9e02e760-5bb2-4f5c-92cd-9104f9d2f814 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Acquiring lock "5527576a-d56d-42c3-a7f7-02c66c0d1b3d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.744134] env[61995]: DEBUG oslo_concurrency.lockutils [None req-9e02e760-5bb2-4f5c-92cd-9104f9d2f814 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Lock "5527576a-d56d-42c3-a7f7-02c66c0d1b3d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 551.744341] env[61995]: DEBUG oslo_concurrency.lockutils [None req-9e02e760-5bb2-4f5c-92cd-9104f9d2f814 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Acquiring lock "5527576a-d56d-42c3-a7f7-02c66c0d1b3d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.744547] env[61995]: DEBUG oslo_concurrency.lockutils [None req-9e02e760-5bb2-4f5c-92cd-9104f9d2f814 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Lock "5527576a-d56d-42c3-a7f7-02c66c0d1b3d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 551.744680] env[61995]: DEBUG oslo_concurrency.lockutils [None req-9e02e760-5bb2-4f5c-92cd-9104f9d2f814 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Lock "5527576a-d56d-42c3-a7f7-02c66c0d1b3d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 551.746802] env[61995]: INFO nova.compute.manager [None req-9e02e760-5bb2-4f5c-92cd-9104f9d2f814 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] [instance: 5527576a-d56d-42c3-a7f7-02c66c0d1b3d] Terminating instance [ 551.748515] env[61995]: DEBUG oslo_concurrency.lockutils [None req-9e02e760-5bb2-4f5c-92cd-9104f9d2f814 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Acquiring lock "refresh_cache-5527576a-d56d-42c3-a7f7-02c66c0d1b3d" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 551.748670] env[61995]: DEBUG 
oslo_concurrency.lockutils [None req-9e02e760-5bb2-4f5c-92cd-9104f9d2f814 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Acquired lock "refresh_cache-5527576a-d56d-42c3-a7f7-02c66c0d1b3d" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 551.748832] env[61995]: DEBUG nova.network.neutron [None req-9e02e760-5bb2-4f5c-92cd-9104f9d2f814 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] [instance: 5527576a-d56d-42c3-a7f7-02c66c0d1b3d] Building network info cache for instance {{(pid=61995) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 551.749380] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-9e02e760-5bb2-4f5c-92cd-9104f9d2f814 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Expecting reply to msg 006547e202584009abd1de192b444ab5 in queue reply_757213bc08bb49dab178826d88b76f40 [ 551.756647] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 006547e202584009abd1de192b444ab5 [ 551.898862] env[61995]: DEBUG nova.scheduler.client.report [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] Inventory has not changed for provider c68e615f-c0c6-4278-bb7b-a579e68878a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61995) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 551.901336] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] Expecting reply to msg 635d8141df8842459d73d4140bb82e5c in queue reply_757213bc08bb49dab178826d88b76f40 [ 551.912071] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 635d8141df8842459d73d4140bb82e5c [ 552.165737] env[61995]: DEBUG nova.network.neutron [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] Updating instance_info_cache with network_info: [] {{(pid=61995) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 552.165737] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Expecting reply to msg b9c2740d37454884a77e53677af9fc78 in queue reply_757213bc08bb49dab178826d88b76f40 [ 552.170333] env[61995]: DEBUG nova.compute.manager [req-76ac0ce1-c889-4d06-b4b1-ce58fb328cd9 req-feb26170-4720-4a29-9a3e-7859be4cc661 service nova] [instance: b819920a-cc74-4718-a054-e81affabcd5b] Received event network-vif-deleted-3a82a148-8617-45cb-95d6-514d987604e1 {{(pid=61995) external_instance_event /opt/stack/nova/nova/compute/manager.py:11144}} [ 552.173850] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC 
response for msg b9c2740d37454884a77e53677af9fc78 [ 552.278388] env[61995]: DEBUG nova.network.neutron [None req-9e02e760-5bb2-4f5c-92cd-9104f9d2f814 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] [instance: 5527576a-d56d-42c3-a7f7-02c66c0d1b3d] Instance cache missing network info. {{(pid=61995) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 552.381567] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Expecting reply to msg 47efd2914c834f70b793a3f073482308 in queue reply_757213bc08bb49dab178826d88b76f40 [ 552.383074] env[61995]: DEBUG nova.network.neutron [None req-9e02e760-5bb2-4f5c-92cd-9104f9d2f814 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] [instance: 5527576a-d56d-42c3-a7f7-02c66c0d1b3d] Updating instance_info_cache with network_info: [] {{(pid=61995) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 552.383502] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-9e02e760-5bb2-4f5c-92cd-9104f9d2f814 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Expecting reply to msg 6f4a540043134e96bf363dfc002b6a99 in queue reply_757213bc08bb49dab178826d88b76f40 [ 552.397062] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6f4a540043134e96bf363dfc002b6a99 [ 552.397609] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 47efd2914c834f70b793a3f073482308 [ 552.403875] env[61995]: DEBUG oslo_concurrency.lockutils [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.299s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 552.404394] env[61995]: DEBUG nova.compute.manager [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] Start building networks asynchronously for instance. 
{{(pid=61995) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 552.406173] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] Expecting reply to msg 456450efbcaa44b68d6b0d8a1d492fe1 in queue reply_757213bc08bb49dab178826d88b76f40 [ 552.407100] env[61995]: DEBUG oslo_concurrency.lockutils [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.185s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.408944] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Expecting reply to msg e2684adf56504c3981dcf3a31c4cb6fb in queue reply_757213bc08bb49dab178826d88b76f40 [ 552.446647] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 456450efbcaa44b68d6b0d8a1d492fe1 [ 552.452114] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e2684adf56504c3981dcf3a31c4cb6fb [ 552.670212] env[61995]: INFO nova.compute.manager [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] [instance: 2a07c11a-4e95-47db-bf50-5ad720403faa] Took 1.16 seconds to deallocate network for instance. [ 552.672130] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Expecting reply to msg f1f58724dea146e388bd0f4d08bf2c11 in queue reply_757213bc08bb49dab178826d88b76f40 [ 552.717681] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f1f58724dea146e388bd0f4d08bf2c11 [ 552.889227] env[61995]: DEBUG oslo_concurrency.lockutils [None req-9e02e760-5bb2-4f5c-92cd-9104f9d2f814 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Releasing lock "refresh_cache-5527576a-d56d-42c3-a7f7-02c66c0d1b3d" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 552.889227] env[61995]: DEBUG nova.compute.manager [None req-9e02e760-5bb2-4f5c-92cd-9104f9d2f814 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] [instance: 5527576a-d56d-42c3-a7f7-02c66c0d1b3d] Start destroying the instance on the hypervisor. 
{{(pid=61995) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 552.889227] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-9e02e760-5bb2-4f5c-92cd-9104f9d2f814 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] [instance: 5527576a-d56d-42c3-a7f7-02c66c0d1b3d] Destroying instance {{(pid=61995) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 552.889456] env[61995]: INFO nova.compute.manager [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Rebuilding instance [ 552.892909] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-805a1ae0-a78a-46c6-bd1c-e7ac0fac27b9 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.899620] env[61995]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e02e760-5bb2-4f5c-92cd-9104f9d2f814 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] [instance: 5527576a-d56d-42c3-a7f7-02c66c0d1b3d] Powering off the VM {{(pid=61995) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 552.899897] env[61995]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-62daa02d-40a1-47ef-afb6-aeef73595be2 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.915028] env[61995]: DEBUG nova.compute.utils [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] Using /dev/sd instead of None {{(pid=61995) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 552.920707] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] Expecting reply to msg 28c3b5a132c04cefaec398208513e5e6 in queue reply_757213bc08bb49dab178826d88b76f40 [ 552.920707] env[61995]: DEBUG nova.compute.manager [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] Allocating IP information in the background. {{(pid=61995) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1990}} [ 552.920707] env[61995]: DEBUG nova.network.neutron [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] allocate_for_instance() {{(pid=61995) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 552.921148] env[61995]: DEBUG oslo_vmware.api [None req-9e02e760-5bb2-4f5c-92cd-9104f9d2f814 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Waiting for the task: (returnval){ [ 552.921148] env[61995]: value = "task-378097" [ 552.921148] env[61995]: _type = "Task" [ 552.921148] env[61995]: } to complete. 
{{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 552.936634] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 28c3b5a132c04cefaec398208513e5e6 [ 552.937222] env[61995]: DEBUG oslo_vmware.api [None req-9e02e760-5bb2-4f5c-92cd-9104f9d2f814 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Task: {'id': task-378097, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 552.964531] env[61995]: DEBUG nova.compute.manager [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Checking state {{(pid=61995) _get_power_state /opt/stack/nova/nova/compute/manager.py:1800}} [ 552.966349] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d171c04-eb04-4e66-a1d7-587ffa0c8a95 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.981617] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Expecting reply to msg 014532af323e437fb779cad34c7bb16e in queue reply_757213bc08bb49dab178826d88b76f40 [ 553.038273] env[61995]: DEBUG nova.policy [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '07ce384cd48442c3a4b50497f3cb2633', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9d0bfaef120f4c2ba98e1b533ffedc23', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61995) authorize /opt/stack/nova/nova/policy.py:203}} [ 553.060314] env[61995]: ERROR nova.compute.manager [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c2d91fee-658c-42ae-9503-e12f3995ca7e, please check neutron logs for more information. 
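
The ERROR record above ("Instance failed network setup after 1 attempt(s)") comes from _allocate_network_async, and the traceback that follows ends in a bare "raise e" at manager.py:2020, i.e. the allocator retries and then re-raises the final failure so the spawn path sees it. A minimal sketch of that retry-then-reraise shape, written in the spirit of Nova's network_allocate_retries option; the helper name and structure below are illustrative, not Nova's actual code:

import logging

LOG = logging.getLogger(__name__)


def allocate_with_retries(allocate, retries=0):
    """Call allocate(); retry up to `retries` extra times, then re-raise."""
    attempts = max(retries, 0) + 1
    for attempt in range(1, attempts + 1):
        try:
            return allocate()
        except Exception as e:
            LOG.exception("network setup failed on attempt %d of %d",
                          attempt, attempts)
            if attempt == attempts:
                # Final attempt: propagate the error to the caller, matching
                # the "raise e" frame at manager.py:2020 in the traceback below.
                raise e
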
[ 553.060314] env[61995]: ERROR nova.compute.manager Traceback (most recent call last): [ 553.060314] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1998, in _allocate_network_async [ 553.060314] env[61995]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 553.060314] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 553.060314] env[61995]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 553.060314] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 553.060314] env[61995]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 553.060314] env[61995]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 553.060314] env[61995]: ERROR nova.compute.manager self.force_reraise() [ 553.060314] env[61995]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 553.060314] env[61995]: ERROR nova.compute.manager raise self.value [ 553.060314] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 553.060314] env[61995]: ERROR nova.compute.manager updated_port = self._update_port( [ 553.060314] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 553.060314] env[61995]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 553.060872] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 553.060872] env[61995]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 553.060872] env[61995]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c2d91fee-658c-42ae-9503-e12f3995ca7e, please check neutron logs for more information. 
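
Both this traceback and the instance-tagged ones earlier in the log bottom out in _ensure_no_port_binding_failure() at nova/network/neutron.py:294, which inspects the port returned by the Neutron port update and raises PortBindingFailed when the binding did not succeed. A rough sketch of that check follows; only the function name, the raise site, and the exception text are confirmed by the log, and the 'binding:vif_type' == 'binding_failed' test is an assumption about how a failed binding is represented on the port:

# Illustrative sketch, not Nova's source.
VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed marker for a failed binding


class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__("Binding failed for port %s, please check neutron "
                         "logs for more information." % port_id)
        self.port_id = port_id


def _ensure_no_port_binding_failure(port):
    # `port` is the dict returned by Neutron for the updated port.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])
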
[ 553.060872] env[61995]: ERROR nova.compute.manager [ 553.060872] env[61995]: Traceback (most recent call last): [ 553.060872] env[61995]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 553.060872] env[61995]: listener.cb(fileno) [ 553.060872] env[61995]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 553.060872] env[61995]: result = function(*args, **kwargs) [ 553.060872] env[61995]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 553.060872] env[61995]: return func(*args, **kwargs) [ 553.060872] env[61995]: File "/opt/stack/nova/nova/compute/manager.py", line 2020, in _allocate_network_async [ 553.060872] env[61995]: raise e [ 553.060872] env[61995]: File "/opt/stack/nova/nova/compute/manager.py", line 1998, in _allocate_network_async [ 553.060872] env[61995]: nwinfo = self.network_api.allocate_for_instance( [ 553.060872] env[61995]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 553.060872] env[61995]: created_port_ids = self._update_ports_for_instance( [ 553.060872] env[61995]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 553.060872] env[61995]: with excutils.save_and_reraise_exception(): [ 553.060872] env[61995]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 553.060872] env[61995]: self.force_reraise() [ 553.060872] env[61995]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 553.060872] env[61995]: raise self.value [ 553.060872] env[61995]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 553.060872] env[61995]: updated_port = self._update_port( [ 553.060872] env[61995]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 553.060872] env[61995]: _ensure_no_port_binding_failure(port) [ 553.060872] env[61995]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 553.060872] env[61995]: raise exception.PortBindingFailed(port_id=port['id']) [ 553.061714] env[61995]: nova.exception.PortBindingFailed: Binding failed for port c2d91fee-658c-42ae-9503-e12f3995ca7e, please check neutron logs for more information. [ 553.061714] env[61995]: Removing descriptor: 15 [ 553.062119] env[61995]: ERROR nova.compute.manager [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c2d91fee-658c-42ae-9503-e12f3995ca7e, please check neutron logs for more information. 
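
The "Instance failed to spawn" record above and the traceback that follows show why the port-binding error surfaces only inside the VMware spawn path: network allocation runs in a background greenthread, and the exception is re-raised when get_vif_info() first iterates the network_info wrapper (model.py __iter__ to _sync_wrapper to wait to self._gt.wait()). A small, self-contained sketch of that deferred-result pattern; the class below is illustrative and is not the actual wrapper Nova uses in nova/network/model.py:

import eventlet


class AsyncResultList(list):
    """List whose contents come from a greenthread started at construction.

    The first iteration blocks on the greenthread; if the worker raised
    (e.g. a port-binding failure), GreenThread.wait() re-raises it here,
    in whatever code consumes the result.
    """

    def __init__(self, func, *args, **kwargs):
        super().__init__()
        # `func` is expected to return a list (of VIF dicts, say).
        self._gt = eventlet.spawn(func, *args, **kwargs)
        self._resolved = False

    def _wait(self):
        if not self._resolved:
            self[:] = self._gt.wait()  # returns the result or re-raises
            self._resolved = True

    def __iter__(self):
        self._wait()
        return super().__iter__()
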
[ 553.062119] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] Traceback (most recent call last): [ 553.062119] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] File "/opt/stack/nova/nova/compute/manager.py", line 2903, in _build_resources [ 553.062119] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] yield resources [ 553.062119] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] File "/opt/stack/nova/nova/compute/manager.py", line 2650, in _build_and_run_instance [ 553.062119] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] self.driver.spawn(context, instance, image_meta, [ 553.062119] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 553.062119] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 553.062119] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 553.062119] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] vm_ref = self.build_virtual_machine(instance, [ 553.062119] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 553.062500] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] vif_infos = vmwarevif.get_vif_info(self._session, [ 553.062500] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 553.062500] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] for vif in network_info: [ 553.062500] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 553.062500] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] return self._sync_wrapper(fn, *args, **kwargs) [ 553.062500] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 553.062500] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] self.wait() [ 553.062500] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 553.062500] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] self[:] = self._gt.wait() [ 553.062500] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 553.062500] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] return self._exit_event.wait() [ 553.062500] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 553.062500] env[61995]: ERROR 
nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] result = hub.switch() [ 553.063017] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 553.063017] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] return self.greenlet.switch() [ 553.063017] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 553.063017] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] result = function(*args, **kwargs) [ 553.063017] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 553.063017] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] return func(*args, **kwargs) [ 553.063017] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] File "/opt/stack/nova/nova/compute/manager.py", line 2020, in _allocate_network_async [ 553.063017] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] raise e [ 553.063017] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] File "/opt/stack/nova/nova/compute/manager.py", line 1998, in _allocate_network_async [ 553.063017] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] nwinfo = self.network_api.allocate_for_instance( [ 553.063017] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 553.063017] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] created_port_ids = self._update_ports_for_instance( [ 553.063017] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 553.063381] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] with excutils.save_and_reraise_exception(): [ 553.063381] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 553.063381] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] self.force_reraise() [ 553.063381] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 553.063381] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] raise self.value [ 553.063381] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 553.063381] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] updated_port = self._update_port( [ 553.063381] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 553.063381] 
env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] _ensure_no_port_binding_failure(port) [ 553.063381] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 553.063381] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] raise exception.PortBindingFailed(port_id=port['id']) [ 553.063381] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] nova.exception.PortBindingFailed: Binding failed for port c2d91fee-658c-42ae-9503-e12f3995ca7e, please check neutron logs for more information. [ 553.063381] env[61995]: ERROR nova.compute.manager [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] [ 553.063751] env[61995]: INFO nova.compute.manager [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] Terminating instance [ 553.066795] env[61995]: DEBUG oslo_concurrency.lockutils [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] Acquiring lock "refresh_cache-2dbe6731-bab0-4a47-91c7-a1d8b42dcec4" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 553.067066] env[61995]: DEBUG oslo_concurrency.lockutils [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] Acquired lock "refresh_cache-2dbe6731-bab0-4a47-91c7-a1d8b42dcec4" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 553.067558] env[61995]: DEBUG nova.network.neutron [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] Building network info cache for instance {{(pid=61995) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 553.068369] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] Expecting reply to msg 6285d6ad289e42b58371af90204ec902 in queue reply_757213bc08bb49dab178826d88b76f40 [ 553.075903] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6285d6ad289e42b58371af90204ec902 [ 553.078238] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 014532af323e437fb779cad34c7bb16e [ 553.149549] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b76494e4-ffc8-437c-8651-56fb1fa379e9 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.160197] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb3edba1-efa1-43ea-a39e-115e0f6f00ed {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.215454] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 
tempest-ServerDiagnosticsTest-1806019006-project-member] Expecting reply to msg b5cb982dea834faa8148ec1d04e40c11 in queue reply_757213bc08bb49dab178826d88b76f40 [ 553.217693] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1670c050-80f3-4d33-8628-03d2ecaef058 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.238833] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad958ea4-7c87-40bc-83cb-fc256f79bdbc {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.259169] env[61995]: DEBUG nova.compute.provider_tree [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Inventory has not changed in ProviderTree for provider: c68e615f-c0c6-4278-bb7b-a579e68878a7 {{(pid=61995) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 553.259932] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Expecting reply to msg 93f84b256b2444418c1b202ef0586b84 in queue reply_757213bc08bb49dab178826d88b76f40 [ 553.270328] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b5cb982dea834faa8148ec1d04e40c11 [ 553.270887] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 93f84b256b2444418c1b202ef0586b84 [ 553.426827] env[61995]: DEBUG nova.compute.manager [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] Start building block device mappings for instance. {{(pid=61995) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 553.426827] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] Expecting reply to msg cc08d8db2dc04bc49affbd5bb390eddb in queue reply_757213bc08bb49dab178826d88b76f40 [ 553.446566] env[61995]: DEBUG oslo_vmware.api [None req-9e02e760-5bb2-4f5c-92cd-9104f9d2f814 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Task: {'id': task-378097, 'name': PowerOffVM_Task, 'duration_secs': 0.210068} completed successfully. 
{{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 553.446860] env[61995]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e02e760-5bb2-4f5c-92cd-9104f9d2f814 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] [instance: 5527576a-d56d-42c3-a7f7-02c66c0d1b3d] Powered off the VM {{(pid=61995) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 553.447230] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-9e02e760-5bb2-4f5c-92cd-9104f9d2f814 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] [instance: 5527576a-d56d-42c3-a7f7-02c66c0d1b3d] Unregistering the VM {{(pid=61995) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 553.447322] env[61995]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b7d83166-5f5d-4812-a7f9-ef680a7fe7d7 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.472824] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-9e02e760-5bb2-4f5c-92cd-9104f9d2f814 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] [instance: 5527576a-d56d-42c3-a7f7-02c66c0d1b3d] Unregistered the VM {{(pid=61995) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 553.472824] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-9e02e760-5bb2-4f5c-92cd-9104f9d2f814 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] [instance: 5527576a-d56d-42c3-a7f7-02c66c0d1b3d] Deleting contents of the VM from datastore datastore1 {{(pid=61995) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 553.472957] env[61995]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e02e760-5bb2-4f5c-92cd-9104f9d2f814 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Deleting the datastore file [datastore1] 5527576a-d56d-42c3-a7f7-02c66c0d1b3d {{(pid=61995) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 553.473131] env[61995]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-952df9a6-063e-48b8-9163-e1e5f70ab4fa {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.480861] env[61995]: DEBUG oslo_vmware.api [None req-9e02e760-5bb2-4f5c-92cd-9104f9d2f814 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Waiting for the task: (returnval){ [ 553.480861] env[61995]: value = "task-378099" [ 553.480861] env[61995]: _type = "Task" [ 553.480861] env[61995]: } to complete. 
{{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 553.484947] env[61995]: DEBUG nova.virt.vmwareapi.vm_util [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Powering off the VM {{(pid=61995) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 553.485209] env[61995]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d7f49c18-753c-4fd2-aeab-5ba489aaae25 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.496776] env[61995]: DEBUG oslo_vmware.api [None req-9e02e760-5bb2-4f5c-92cd-9104f9d2f814 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Task: {'id': task-378099, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 553.496776] env[61995]: DEBUG oslo_vmware.api [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Waiting for the task: (returnval){ [ 553.496776] env[61995]: value = "task-378100" [ 553.496776] env[61995]: _type = "Task" [ 553.496776] env[61995]: } to complete. {{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 553.503870] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cc08d8db2dc04bc49affbd5bb390eddb [ 553.511096] env[61995]: DEBUG oslo_vmware.api [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Task: {'id': task-378100, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 553.628279] env[61995]: DEBUG nova.network.neutron [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] Instance cache missing network info. 
{{(pid=61995) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 553.747823] env[61995]: INFO nova.scheduler.client.report [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Deleted allocations for instance 2a07c11a-4e95-47db-bf50-5ad720403faa [ 553.752232] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Expecting reply to msg a9c7a6aa7dca47d59665bf60445b1b5b in queue reply_757213bc08bb49dab178826d88b76f40 [ 553.768895] env[61995]: DEBUG nova.scheduler.client.report [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Inventory has not changed for provider c68e615f-c0c6-4278-bb7b-a579e68878a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61995) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 553.771580] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Expecting reply to msg 2b7152e758714a339ce169be2cd2f6b9 in queue reply_757213bc08bb49dab178826d88b76f40 [ 553.780946] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a9c7a6aa7dca47d59665bf60445b1b5b [ 553.793356] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2b7152e758714a339ce169be2cd2f6b9 [ 553.944942] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] Expecting reply to msg 504d6374aa8c4ff2a9f9ee07b0242b9e in queue reply_757213bc08bb49dab178826d88b76f40 [ 553.992362] env[61995]: DEBUG oslo_vmware.api [None req-9e02e760-5bb2-4f5c-92cd-9104f9d2f814 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Task: {'id': task-378099, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.126437} completed successfully. 
{{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 553.992362] env[61995]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e02e760-5bb2-4f5c-92cd-9104f9d2f814 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Deleted the datastore file {{(pid=61995) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 553.992362] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-9e02e760-5bb2-4f5c-92cd-9104f9d2f814 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] [instance: 5527576a-d56d-42c3-a7f7-02c66c0d1b3d] Deleted contents of the VM from datastore datastore1 {{(pid=61995) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 553.992362] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-9e02e760-5bb2-4f5c-92cd-9104f9d2f814 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] [instance: 5527576a-d56d-42c3-a7f7-02c66c0d1b3d] Instance destroyed {{(pid=61995) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 553.992632] env[61995]: INFO nova.compute.manager [None req-9e02e760-5bb2-4f5c-92cd-9104f9d2f814 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] [instance: 5527576a-d56d-42c3-a7f7-02c66c0d1b3d] Took 1.10 seconds to destroy the instance on the hypervisor. [ 553.992802] env[61995]: DEBUG oslo.service.loopingcall [None req-9e02e760-5bb2-4f5c-92cd-9104f9d2f814 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61995) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 553.992902] env[61995]: DEBUG nova.compute.manager [-] [instance: 5527576a-d56d-42c3-a7f7-02c66c0d1b3d] Deallocating network for instance {{(pid=61995) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2298}} [ 553.992994] env[61995]: DEBUG nova.network.neutron [-] [instance: 5527576a-d56d-42c3-a7f7-02c66c0d1b3d] deallocate_for_instance() {{(pid=61995) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 553.998858] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 504d6374aa8c4ff2a9f9ee07b0242b9e [ 554.008687] env[61995]: DEBUG oslo_vmware.api [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Task: {'id': task-378100, 'name': PowerOffVM_Task, 'duration_secs': 0.127219} completed successfully. 
{{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 554.008945] env[61995]: DEBUG nova.virt.vmwareapi.vm_util [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Powered off the VM {{(pid=61995) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 554.009182] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Destroying instance {{(pid=61995) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 554.009990] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebb6e25b-298e-41c7-b481-cbd12d654deb {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.019845] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Unregistering the VM {{(pid=61995) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 554.020484] env[61995]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fcfef146-9831-40b1-9e48-957069baab92 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.039397] env[61995]: DEBUG nova.network.neutron [-] [instance: 5527576a-d56d-42c3-a7f7-02c66c0d1b3d] Instance cache missing network info. 
{{(pid=61995) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 554.039995] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 3e8439e32ca34a4ea60df85d8a3c41f2 in queue reply_757213bc08bb49dab178826d88b76f40 [ 554.054503] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e8439e32ca34a4ea60df85d8a3c41f2 [ 554.058454] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Unregistered the VM {{(pid=61995) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 554.058776] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Deleting contents of the VM from datastore datastore2 {{(pid=61995) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 554.059082] env[61995]: DEBUG nova.virt.vmwareapi.ds_util [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Deleting the datastore file [datastore2] 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62 {{(pid=61995) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 554.059459] env[61995]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9b806e44-f3ba-453f-8140-412999a9dca6 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.070627] env[61995]: DEBUG oslo_vmware.api [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Waiting for the task: (returnval){ [ 554.070627] env[61995]: value = "task-378103" [ 554.070627] env[61995]: _type = "Task" [ 554.070627] env[61995]: } to complete. {{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 554.079793] env[61995]: DEBUG oslo_vmware.api [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Task: {'id': task-378103, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 554.136159] env[61995]: DEBUG nova.network.neutron [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] Updating instance_info_cache with network_info: [] {{(pid=61995) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 554.136711] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] Expecting reply to msg d06b18a781b84002ae63b76e30dd49fd in queue reply_757213bc08bb49dab178826d88b76f40 [ 554.156532] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d06b18a781b84002ae63b76e30dd49fd [ 554.255091] env[61995]: DEBUG oslo_concurrency.lockutils [None req-f3a8686d-cadc-4239-bfc5-8c2bb0366eb3 tempest-ServerDiagnosticsTest-1806019006 tempest-ServerDiagnosticsTest-1806019006-project-member] Lock "2a07c11a-4e95-47db-bf50-5ad720403faa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.718s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 554.281767] env[61995]: DEBUG oslo_concurrency.lockutils [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.874s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 554.282373] env[61995]: ERROR nova.compute.manager [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 58958f67-8153-4158-b726-bdfe68b1e491, please check neutron logs for more information. 
[ 554.282373] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] Traceback (most recent call last): [ 554.282373] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] File "/opt/stack/nova/nova/compute/manager.py", line 2650, in _build_and_run_instance [ 554.282373] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] self.driver.spawn(context, instance, image_meta, [ 554.282373] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 554.282373] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 554.282373] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 554.282373] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] vm_ref = self.build_virtual_machine(instance, [ 554.282373] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 554.282373] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] vif_infos = vmwarevif.get_vif_info(self._session, [ 554.282373] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 554.282750] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] for vif in network_info: [ 554.282750] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 554.282750] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] return self._sync_wrapper(fn, *args, **kwargs) [ 554.282750] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 554.282750] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] self.wait() [ 554.282750] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 554.282750] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] self[:] = self._gt.wait() [ 554.282750] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 554.282750] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] return self._exit_event.wait() [ 554.282750] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 554.282750] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] result = hub.switch() [ 554.282750] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
554.282750] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] return self.greenlet.switch() [ 554.283143] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 554.283143] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] result = function(*args, **kwargs) [ 554.283143] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 554.283143] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] return func(*args, **kwargs) [ 554.283143] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] File "/opt/stack/nova/nova/compute/manager.py", line 2020, in _allocate_network_async [ 554.283143] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] raise e [ 554.283143] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] File "/opt/stack/nova/nova/compute/manager.py", line 1998, in _allocate_network_async [ 554.283143] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] nwinfo = self.network_api.allocate_for_instance( [ 554.283143] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 554.283143] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] created_port_ids = self._update_ports_for_instance( [ 554.283143] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 554.283143] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] with excutils.save_and_reraise_exception(): [ 554.283143] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 554.283532] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] self.force_reraise() [ 554.283532] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 554.283532] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] raise self.value [ 554.283532] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 554.283532] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] updated_port = self._update_port( [ 554.283532] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 554.283532] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] _ensure_no_port_binding_failure(port) [ 554.283532] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 554.283532] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] raise exception.PortBindingFailed(port_id=port['id']) [ 554.283532] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] nova.exception.PortBindingFailed: Binding failed for port 58958f67-8153-4158-b726-bdfe68b1e491, please check neutron logs for more information. [ 554.283532] env[61995]: ERROR nova.compute.manager [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] [ 554.283871] env[61995]: DEBUG nova.compute.utils [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] Binding failed for port 58958f67-8153-4158-b726-bdfe68b1e491, please check neutron logs for more information. {{(pid=61995) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 554.284761] env[61995]: DEBUG oslo_concurrency.lockutils [None req-9ace5a90-e467-428a-82b2-7a1c680dfdf4 tempest-ServersAdminNegativeTestJSON-2047713534 tempest-ServersAdminNegativeTestJSON-2047713534-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.110s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 554.286172] env[61995]: INFO nova.compute.claims [None req-9ace5a90-e467-428a-82b2-7a1c680dfdf4 tempest-ServersAdminNegativeTestJSON-2047713534 tempest-ServersAdminNegativeTestJSON-2047713534-project-member] [instance: aa885305-7cdb-44d3-9bd2-880eb042a63e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 554.287704] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-9ace5a90-e467-428a-82b2-7a1c680dfdf4 tempest-ServersAdminNegativeTestJSON-2047713534 tempest-ServersAdminNegativeTestJSON-2047713534-project-member] Expecting reply to msg f65e8d9d63074884a18d027e2a25673a in queue reply_757213bc08bb49dab178826d88b76f40 [ 554.288944] env[61995]: DEBUG nova.compute.manager [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] Build of instance cfffe8e0-4074-41c9-8e1b-49d621fc3c1b was re-scheduled: Binding failed for port 58958f67-8153-4158-b726-bdfe68b1e491, please check neutron logs for more information. 
{{(pid=61995) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2489}} [ 554.289391] env[61995]: DEBUG nova.compute.manager [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] Unplugging VIFs for instance {{(pid=61995) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3015}} [ 554.289615] env[61995]: DEBUG oslo_concurrency.lockutils [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Acquiring lock "refresh_cache-cfffe8e0-4074-41c9-8e1b-49d621fc3c1b" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 554.289761] env[61995]: DEBUG oslo_concurrency.lockutils [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Acquired lock "refresh_cache-cfffe8e0-4074-41c9-8e1b-49d621fc3c1b" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 554.289941] env[61995]: DEBUG nova.network.neutron [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] Building network info cache for instance {{(pid=61995) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 554.290624] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Expecting reply to msg 832d82557fe94178a9b16f83a361ed11 in queue reply_757213bc08bb49dab178826d88b76f40 [ 554.296832] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 832d82557fe94178a9b16f83a361ed11 [ 554.327345] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f65e8d9d63074884a18d027e2a25673a [ 554.451221] env[61995]: DEBUG nova.compute.manager [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] Start spawning the instance on the hypervisor. 
{{(pid=61995) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 554.480132] env[61995]: DEBUG nova.virt.hardware [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-06T07:57:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-06T07:57:11Z,direct_url=,disk_format='vmdk',id=947125f0-9664-40eb-953e-b1373b076c9f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3dc1e6434ef44fd0be01fd4ebf2b62ef',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-06T07:57:12Z,virtual_size=,visibility=), allow threads: False {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 554.480405] env[61995]: DEBUG nova.virt.hardware [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] Flavor limits 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 554.480563] env[61995]: DEBUG nova.virt.hardware [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] Image limits 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 554.480741] env[61995]: DEBUG nova.virt.hardware [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] Flavor pref 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 554.480952] env[61995]: DEBUG nova.virt.hardware [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] Image pref 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 554.481103] env[61995]: DEBUG nova.virt.hardware [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 554.481329] env[61995]: DEBUG nova.virt.hardware [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 554.481490] env[61995]: DEBUG nova.virt.hardware [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61995) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 554.481657] env[61995]: DEBUG nova.virt.hardware [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] Got 1 possible topologies {{(pid=61995) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 554.481818] env[61995]: DEBUG nova.virt.hardware [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 554.481989] env[61995]: DEBUG nova.virt.hardware [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 554.482874] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f226da1-f489-4740-a92e-56a8d5fa450d {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.491751] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9f19da8-0e5f-464c-9ea1-ab75bec0ed9f {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.542787] env[61995]: DEBUG nova.network.neutron [-] [instance: 5527576a-d56d-42c3-a7f7-02c66c0d1b3d] Updating instance_info_cache with network_info: [] {{(pid=61995) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 554.543118] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg dc3e207576434f83bd4edbc66b8b1c77 in queue reply_757213bc08bb49dab178826d88b76f40 [ 554.555467] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dc3e207576434f83bd4edbc66b8b1c77 [ 554.580352] env[61995]: DEBUG oslo_vmware.api [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Task: {'id': task-378103, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 554.620236] env[61995]: DEBUG nova.network.neutron [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] Successfully created port: b5b283c3-8a4b-4e77-827e-2748774f1e0f {{(pid=61995) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 554.639567] env[61995]: DEBUG oslo_concurrency.lockutils [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] Releasing lock "refresh_cache-2dbe6731-bab0-4a47-91c7-a1d8b42dcec4" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 554.640166] env[61995]: DEBUG nova.compute.manager [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] Start destroying the instance on the hypervisor. {{(pid=61995) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 554.640264] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] Destroying instance {{(pid=61995) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 554.640569] env[61995]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ae45bbe6-ad7a-4b89-9418-89431478c13d {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.649108] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04a1c2de-70de-4053-86fb-35b24f9685b1 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.672448] env[61995]: WARNING nova.virt.vmwareapi.vmops [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4 could not be found. [ 554.672655] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] Instance destroyed {{(pid=61995) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 554.672829] env[61995]: INFO nova.compute.manager [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] Took 0.03 seconds to destroy the instance on the hypervisor. [ 554.673070] env[61995]: DEBUG oslo.service.loopingcall [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61995) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 554.673286] env[61995]: DEBUG nova.compute.manager [-] [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] Deallocating network for instance {{(pid=61995) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2298}} [ 554.673375] env[61995]: DEBUG nova.network.neutron [-] [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] deallocate_for_instance() {{(pid=61995) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 554.757581] env[61995]: DEBUG nova.network.neutron [-] [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] Instance cache missing network info. {{(pid=61995) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 554.758188] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 5ebdf96bdb714bafaeea392d66e37912 in queue reply_757213bc08bb49dab178826d88b76f40 [ 554.766280] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ebdf96bdb714bafaeea392d66e37912 [ 554.794294] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-9ace5a90-e467-428a-82b2-7a1c680dfdf4 tempest-ServersAdminNegativeTestJSON-2047713534 tempest-ServersAdminNegativeTestJSON-2047713534-project-member] Expecting reply to msg 30e2ca5b0e854477bb26fe6e26fcac02 in queue reply_757213bc08bb49dab178826d88b76f40 [ 554.804215] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 30e2ca5b0e854477bb26fe6e26fcac02 [ 554.845044] env[61995]: DEBUG nova.network.neutron [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] Instance cache missing network info. {{(pid=61995) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 555.046059] env[61995]: INFO nova.compute.manager [-] [instance: 5527576a-d56d-42c3-a7f7-02c66c0d1b3d] Took 1.05 seconds to deallocate network for instance. [ 555.050551] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-9e02e760-5bb2-4f5c-92cd-9104f9d2f814 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Expecting reply to msg 580e571935ce4b5798f5fff6d41a4456 in queue reply_757213bc08bb49dab178826d88b76f40 [ 555.081339] env[61995]: DEBUG oslo_vmware.api [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Task: {'id': task-378103, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 555.093465] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 580e571935ce4b5798f5fff6d41a4456 [ 555.134223] env[61995]: DEBUG nova.network.neutron [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] Updating instance_info_cache with network_info: [] {{(pid=61995) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 555.134742] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Expecting reply to msg 4050721a58c841d9b1e7c663d9ca3e00 in queue reply_757213bc08bb49dab178826d88b76f40 [ 555.143258] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4050721a58c841d9b1e7c663d9ca3e00 [ 555.179501] env[61995]: DEBUG nova.compute.manager [req-d826b3a3-2e8e-4051-9596-8cc8c33aba87 req-af8827cc-92ce-457f-bd4b-ac92cc54f589 service nova] [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] Received event network-changed-c2d91fee-658c-42ae-9503-e12f3995ca7e {{(pid=61995) external_instance_event /opt/stack/nova/nova/compute/manager.py:11144}} [ 555.179692] env[61995]: DEBUG nova.compute.manager [req-d826b3a3-2e8e-4051-9596-8cc8c33aba87 req-af8827cc-92ce-457f-bd4b-ac92cc54f589 service nova] [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] Refreshing instance network info cache due to event network-changed-c2d91fee-658c-42ae-9503-e12f3995ca7e. {{(pid=61995) external_instance_event /opt/stack/nova/nova/compute/manager.py:11149}} [ 555.179963] env[61995]: DEBUG oslo_concurrency.lockutils [req-d826b3a3-2e8e-4051-9596-8cc8c33aba87 req-af8827cc-92ce-457f-bd4b-ac92cc54f589 service nova] Acquiring lock "refresh_cache-2dbe6731-bab0-4a47-91c7-a1d8b42dcec4" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 555.180124] env[61995]: DEBUG oslo_concurrency.lockutils [req-d826b3a3-2e8e-4051-9596-8cc8c33aba87 req-af8827cc-92ce-457f-bd4b-ac92cc54f589 service nova] Acquired lock "refresh_cache-2dbe6731-bab0-4a47-91c7-a1d8b42dcec4" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 555.180281] env[61995]: DEBUG nova.network.neutron [req-d826b3a3-2e8e-4051-9596-8cc8c33aba87 req-af8827cc-92ce-457f-bd4b-ac92cc54f589 service nova] [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] Refreshing network info cache for port c2d91fee-658c-42ae-9503-e12f3995ca7e {{(pid=61995) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 555.180692] env[61995]: INFO oslo_messaging._drivers.amqpdriver [req-d826b3a3-2e8e-4051-9596-8cc8c33aba87 req-af8827cc-92ce-457f-bd4b-ac92cc54f589 service nova] Expecting reply to msg 2ce0250cebae48b19a0580328ab3a8d1 in queue reply_757213bc08bb49dab178826d88b76f40 [ 555.187310] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2ce0250cebae48b19a0580328ab3a8d1 [ 555.259885] env[61995]: DEBUG nova.network.neutron [-] [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] Updating instance_info_cache with network_info: [] {{(pid=61995) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 555.260412] env[61995]: INFO 
oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 63bd30a835b94ecc8d38f7ceaab288da in queue reply_757213bc08bb49dab178826d88b76f40 [ 555.268035] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 63bd30a835b94ecc8d38f7ceaab288da [ 555.458984] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6089c9ff-4e46-4fdc-8dc0-a207bd2b9e7f {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.467940] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5f4bfbc-9d78-4bde-8f92-0bd8d1e0907e {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.504441] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb6808e8-d240-4eaa-a841-c4a7fd17aeab {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.512381] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4c4b4be-d8ee-4bdb-b38c-bd1885ba2458 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.528551] env[61995]: DEBUG nova.compute.provider_tree [None req-9ace5a90-e467-428a-82b2-7a1c680dfdf4 tempest-ServersAdminNegativeTestJSON-2047713534 tempest-ServersAdminNegativeTestJSON-2047713534-project-member] Inventory has not changed in ProviderTree for provider: c68e615f-c0c6-4278-bb7b-a579e68878a7 {{(pid=61995) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 555.529253] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-9ace5a90-e467-428a-82b2-7a1c680dfdf4 tempest-ServersAdminNegativeTestJSON-2047713534 tempest-ServersAdminNegativeTestJSON-2047713534-project-member] Expecting reply to msg 604ccb8fe368470a98841d0f8a275829 in queue reply_757213bc08bb49dab178826d88b76f40 [ 555.538417] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 604ccb8fe368470a98841d0f8a275829 [ 555.553638] env[61995]: DEBUG oslo_concurrency.lockutils [None req-9e02e760-5bb2-4f5c-92cd-9104f9d2f814 tempest-ServerDiagnosticsV248Test-872630016 tempest-ServerDiagnosticsV248Test-872630016-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 555.582528] env[61995]: DEBUG oslo_vmware.api [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Task: {'id': task-378103, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.412935} completed successfully. 
{{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 555.582789] env[61995]: DEBUG nova.virt.vmwareapi.ds_util [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Deleted the datastore file {{(pid=61995) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 555.582972] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Deleted contents of the VM from datastore datastore2 {{(pid=61995) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 555.583144] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Instance destroyed {{(pid=61995) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 555.586229] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Expecting reply to msg 63cb09e359cf4cf7b2c9a959fcd4e948 in queue reply_757213bc08bb49dab178826d88b76f40 [ 555.637292] env[61995]: DEBUG oslo_concurrency.lockutils [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Releasing lock "refresh_cache-cfffe8e0-4074-41c9-8e1b-49d621fc3c1b" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 555.637550] env[61995]: DEBUG nova.compute.manager [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61995) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3038}} [ 555.637789] env[61995]: DEBUG nova.compute.manager [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] Deallocating network for instance {{(pid=61995) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2298}} [ 555.637982] env[61995]: DEBUG nova.network.neutron [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] deallocate_for_instance() {{(pid=61995) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 555.640113] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 63cb09e359cf4cf7b2c9a959fcd4e948 [ 555.699172] env[61995]: DEBUG nova.network.neutron [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] Instance cache missing network info. 
{{(pid=61995) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 555.699172] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Expecting reply to msg cde1e4147882451fa45b24ba55deda9c in queue reply_757213bc08bb49dab178826d88b76f40 [ 555.709092] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cde1e4147882451fa45b24ba55deda9c [ 555.725101] env[61995]: DEBUG nova.network.neutron [req-d826b3a3-2e8e-4051-9596-8cc8c33aba87 req-af8827cc-92ce-457f-bd4b-ac92cc54f589 service nova] [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] Instance cache missing network info. {{(pid=61995) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 555.763759] env[61995]: INFO nova.compute.manager [-] [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] Took 1.09 seconds to deallocate network for instance. [ 555.765151] env[61995]: DEBUG nova.compute.claims [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] Aborting claim: {{(pid=61995) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 555.765518] env[61995]: DEBUG oslo_concurrency.lockutils [None req-a85a9074-04e3-4381-94ff-506e7a8711e5 tempest-AttachInterfacesV270Test-1294541225 tempest-AttachInterfacesV270Test-1294541225-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 556.035290] env[61995]: DEBUG nova.scheduler.client.report [None req-9ace5a90-e467-428a-82b2-7a1c680dfdf4 tempest-ServersAdminNegativeTestJSON-2047713534 tempest-ServersAdminNegativeTestJSON-2047713534-project-member] Inventory has not changed for provider c68e615f-c0c6-4278-bb7b-a579e68878a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61995) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 556.035290] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-9ace5a90-e467-428a-82b2-7a1c680dfdf4 tempest-ServersAdminNegativeTestJSON-2047713534 tempest-ServersAdminNegativeTestJSON-2047713534-project-member] Expecting reply to msg 3f544bc0904442999f5a9c6aed2e32a9 in queue reply_757213bc08bb49dab178826d88b76f40 [ 556.051844] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3f544bc0904442999f5a9c6aed2e32a9 [ 556.077470] env[61995]: DEBUG nova.network.neutron [req-d826b3a3-2e8e-4051-9596-8cc8c33aba87 req-af8827cc-92ce-457f-bd4b-ac92cc54f589 service nova] [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] Updating instance_info_cache with network_info: [] {{(pid=61995) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 556.078023] env[61995]: INFO oslo_messaging._drivers.amqpdriver [req-d826b3a3-2e8e-4051-9596-8cc8c33aba87 req-af8827cc-92ce-457f-bd4b-ac92cc54f589 service nova] Expecting reply to msg 
5dcd1eab4bf0434c8717b69ef20c8b9c in queue reply_757213bc08bb49dab178826d88b76f40 [ 556.087738] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5dcd1eab4bf0434c8717b69ef20c8b9c [ 556.090298] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Expecting reply to msg 46900dbffb82401f811c34252232c88b in queue reply_757213bc08bb49dab178826d88b76f40 [ 556.162297] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 46900dbffb82401f811c34252232c88b [ 556.200393] env[61995]: DEBUG nova.network.neutron [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] Updating instance_info_cache with network_info: [] {{(pid=61995) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 556.200962] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Expecting reply to msg 5bcbfcb233444f36a8889a8b9fbe1d13 in queue reply_757213bc08bb49dab178826d88b76f40 [ 556.209380] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5bcbfcb233444f36a8889a8b9fbe1d13 [ 556.538031] env[61995]: DEBUG oslo_concurrency.lockutils [None req-9ace5a90-e467-428a-82b2-7a1c680dfdf4 tempest-ServersAdminNegativeTestJSON-2047713534 tempest-ServersAdminNegativeTestJSON-2047713534-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.253s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 556.538631] env[61995]: DEBUG nova.compute.manager [None req-9ace5a90-e467-428a-82b2-7a1c680dfdf4 tempest-ServersAdminNegativeTestJSON-2047713534 tempest-ServersAdminNegativeTestJSON-2047713534-project-member] [instance: aa885305-7cdb-44d3-9bd2-880eb042a63e] Start building networks asynchronously for instance. 
{{(pid=61995) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 556.540312] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-9ace5a90-e467-428a-82b2-7a1c680dfdf4 tempest-ServersAdminNegativeTestJSON-2047713534 tempest-ServersAdminNegativeTestJSON-2047713534-project-member] Expecting reply to msg c6e6cb55cbc440d1a58b88d2276d8dd4 in queue reply_757213bc08bb49dab178826d88b76f40 [ 556.541379] env[61995]: DEBUG oslo_concurrency.lockutils [None req-c2ead0ea-6b64-4408-9280-a0773cf9692e tempest-ServersWithSpecificFlavorTestJSON-889640241 tempest-ServersWithSpecificFlavorTestJSON-889640241-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.837s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 556.547163] env[61995]: INFO nova.compute.claims [None req-c2ead0ea-6b64-4408-9280-a0773cf9692e tempest-ServersWithSpecificFlavorTestJSON-889640241 tempest-ServersWithSpecificFlavorTestJSON-889640241-project-member] [instance: afe9a3ca-2235-4a2b-9b9f-7988a77f3c2d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 556.548646] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-c2ead0ea-6b64-4408-9280-a0773cf9692e tempest-ServersWithSpecificFlavorTestJSON-889640241 tempest-ServersWithSpecificFlavorTestJSON-889640241-project-member] Expecting reply to msg cff17180de394a49a9e7f05adf19d1bc in queue reply_757213bc08bb49dab178826d88b76f40 [ 556.578897] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c6e6cb55cbc440d1a58b88d2276d8dd4 [ 556.584548] env[61995]: DEBUG oslo_concurrency.lockutils [req-d826b3a3-2e8e-4051-9596-8cc8c33aba87 req-af8827cc-92ce-457f-bd4b-ac92cc54f589 service nova] Releasing lock "refresh_cache-2dbe6731-bab0-4a47-91c7-a1d8b42dcec4" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 556.584796] env[61995]: DEBUG nova.compute.manager [req-d826b3a3-2e8e-4051-9596-8cc8c33aba87 req-af8827cc-92ce-457f-bd4b-ac92cc54f589 service nova] [instance: 2dbe6731-bab0-4a47-91c7-a1d8b42dcec4] Received event network-vif-deleted-c2d91fee-658c-42ae-9503-e12f3995ca7e {{(pid=61995) external_instance_event /opt/stack/nova/nova/compute/manager.py:11144}} [ 556.595195] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cff17180de394a49a9e7f05adf19d1bc [ 556.624312] env[61995]: DEBUG nova.virt.hardware [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Getting desirable topologies for flavor Flavor(created_at=2024-08-06T07:57:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-06T07:57:11Z,direct_url=,disk_format='vmdk',id=947125f0-9664-40eb-953e-b1373b076c9f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3dc1e6434ef44fd0be01fd4ebf2b62ef',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-06T07:57:12Z,virtual_size=,visibility=), allow threads: False {{(pid=61995) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:563}} [ 556.624312] env[61995]: DEBUG nova.virt.hardware [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Flavor limits 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 556.624312] env[61995]: DEBUG nova.virt.hardware [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Image limits 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 556.624312] env[61995]: DEBUG nova.virt.hardware [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Flavor pref 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 556.624532] env[61995]: DEBUG nova.virt.hardware [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Image pref 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 556.624532] env[61995]: DEBUG nova.virt.hardware [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 556.624532] env[61995]: DEBUG nova.virt.hardware [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 556.624532] env[61995]: DEBUG nova.virt.hardware [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61995) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 556.624532] env[61995]: DEBUG nova.virt.hardware [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Got 1 possible topologies {{(pid=61995) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 556.624691] env[61995]: DEBUG nova.virt.hardware [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 556.624691] env[61995]: DEBUG nova.virt.hardware [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 556.624691] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f3a5916-2a8e-4e3e-aa0f-8069585f5c3b {{(pid=61995) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.633322] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecd4b267-a2ec-4cb0-8f7a-1df69f3a4b95 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.655728] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Instance VIF info [] {{(pid=61995) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 556.664909] env[61995]: DEBUG oslo.service.loopingcall [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61995) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 556.665211] env[61995]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Creating VM on the ESX host {{(pid=61995) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 556.665515] env[61995]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-410da368-3343-46d0-aa87-23113562c08d {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.691800] env[61995]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 556.691800] env[61995]: value = "task-378105" [ 556.691800] env[61995]: _type = "Task" [ 556.691800] env[61995]: } to complete. {{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 556.716229] env[61995]: INFO nova.compute.manager [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] [instance: cfffe8e0-4074-41c9-8e1b-49d621fc3c1b] Took 1.08 seconds to deallocate network for instance. [ 556.720354] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Expecting reply to msg 12149faf351e4200af8aaee31c2b1ffd in queue reply_757213bc08bb49dab178826d88b76f40 [ 556.721893] env[61995]: DEBUG oslo_vmware.api [-] Task: {'id': task-378105, 'name': CreateVM_Task} progress is 6%. 
{{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 556.785686] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 12149faf351e4200af8aaee31c2b1ffd [ 556.879155] env[61995]: DEBUG oslo_concurrency.lockutils [None req-18b86b01-7dfe-4991-8317-17cae69a93a6 tempest-TenantUsagesTestJSON-1961309574 tempest-TenantUsagesTestJSON-1961309574-project-member] Acquiring lock "bd326bfa-d15a-4ce5-b4f6-6738aae0f60c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 556.879423] env[61995]: DEBUG oslo_concurrency.lockutils [None req-18b86b01-7dfe-4991-8317-17cae69a93a6 tempest-TenantUsagesTestJSON-1961309574 tempest-TenantUsagesTestJSON-1961309574-project-member] Lock "bd326bfa-d15a-4ce5-b4f6-6738aae0f60c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 556.879952] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-18b86b01-7dfe-4991-8317-17cae69a93a6 tempest-TenantUsagesTestJSON-1961309574 tempest-TenantUsagesTestJSON-1961309574-project-member] Expecting reply to msg 997e3ce95f1a4f6e89adaa736ebd1145 in queue reply_757213bc08bb49dab178826d88b76f40 [ 556.896800] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 997e3ce95f1a4f6e89adaa736ebd1145 [ 557.051467] env[61995]: DEBUG nova.compute.utils [None req-9ace5a90-e467-428a-82b2-7a1c680dfdf4 tempest-ServersAdminNegativeTestJSON-2047713534 tempest-ServersAdminNegativeTestJSON-2047713534-project-member] Using /dev/sd instead of None {{(pid=61995) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 557.052129] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-9ace5a90-e467-428a-82b2-7a1c680dfdf4 tempest-ServersAdminNegativeTestJSON-2047713534 tempest-ServersAdminNegativeTestJSON-2047713534-project-member] Expecting reply to msg 5e0e1661afe0456cb2704dffdb174b02 in queue reply_757213bc08bb49dab178826d88b76f40 [ 557.054504] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-c2ead0ea-6b64-4408-9280-a0773cf9692e tempest-ServersWithSpecificFlavorTestJSON-889640241 tempest-ServersWithSpecificFlavorTestJSON-889640241-project-member] Expecting reply to msg 01bb4b9d8bcd4f82886e8446d61a3ffd in queue reply_757213bc08bb49dab178826d88b76f40 [ 557.055281] env[61995]: DEBUG nova.compute.manager [None req-9ace5a90-e467-428a-82b2-7a1c680dfdf4 tempest-ServersAdminNegativeTestJSON-2047713534 tempest-ServersAdminNegativeTestJSON-2047713534-project-member] [instance: aa885305-7cdb-44d3-9bd2-880eb042a63e] Allocating IP information in the background. 
{{(pid=61995) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1990}} [ 557.055445] env[61995]: DEBUG nova.network.neutron [None req-9ace5a90-e467-428a-82b2-7a1c680dfdf4 tempest-ServersAdminNegativeTestJSON-2047713534 tempest-ServersAdminNegativeTestJSON-2047713534-project-member] [instance: aa885305-7cdb-44d3-9bd2-880eb042a63e] allocate_for_instance() {{(pid=61995) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 557.070721] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5e0e1661afe0456cb2704dffdb174b02 [ 557.073114] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 01bb4b9d8bcd4f82886e8446d61a3ffd [ 557.201990] env[61995]: DEBUG oslo_vmware.api [-] Task: {'id': task-378105, 'name': CreateVM_Task, 'duration_secs': 0.260535} completed successfully. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 557.202162] env[61995]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Created VM on the ESX host {{(pid=61995) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 557.202583] env[61995]: DEBUG oslo_concurrency.lockutils [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/947125f0-9664-40eb-953e-b1373b076c9f" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 557.202734] env[61995]: DEBUG oslo_concurrency.lockutils [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/947125f0-9664-40eb-953e-b1373b076c9f" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 557.203084] env[61995]: DEBUG oslo_concurrency.lockutils [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Acquired external semaphore "[datastore1] devstack-image-cache_base/947125f0-9664-40eb-953e-b1373b076c9f" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 557.203650] env[61995]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49fa6af3-4a60-4bcb-b12a-b4519dfbad6c {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.209123] env[61995]: DEBUG oslo_vmware.api [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Waiting for the task: (returnval){ [ 557.209123] env[61995]: value = "session[5258be39-d7e5-2e06-99ec-0419db7e1a3e]5277ed17-84ad-d5ca-c542-3686df3568dd" [ 557.209123] env[61995]: _type = "Task" [ 557.209123] env[61995]: } to complete. {{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 557.219271] env[61995]: DEBUG oslo_vmware.api [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Task: {'id': session[5258be39-d7e5-2e06-99ec-0419db7e1a3e]5277ed17-84ad-d5ca-c542-3686df3568dd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 557.225554] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Expecting reply to msg a42aa9496dee4a4dac9f9338b9c69ae1 in queue reply_757213bc08bb49dab178826d88b76f40 [ 557.266767] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a42aa9496dee4a4dac9f9338b9c69ae1 [ 557.277509] env[61995]: DEBUG nova.policy [None req-9ace5a90-e467-428a-82b2-7a1c680dfdf4 tempest-ServersAdminNegativeTestJSON-2047713534 tempest-ServersAdminNegativeTestJSON-2047713534-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '59eca93628cf4144a4545d5c9a2727ff', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5062f688e6cf49a6b93cdfa3d63d5af6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61995) authorize /opt/stack/nova/nova/policy.py:203}} [ 557.329573] env[61995]: ERROR nova.compute.manager [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b5b283c3-8a4b-4e77-827e-2748774f1e0f, please check neutron logs for more information. [ 557.329573] env[61995]: ERROR nova.compute.manager Traceback (most recent call last): [ 557.329573] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1998, in _allocate_network_async [ 557.329573] env[61995]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 557.329573] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 557.329573] env[61995]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 557.329573] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 557.329573] env[61995]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 557.329573] env[61995]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 557.329573] env[61995]: ERROR nova.compute.manager self.force_reraise() [ 557.329573] env[61995]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 557.329573] env[61995]: ERROR nova.compute.manager raise self.value [ 557.329573] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 557.329573] env[61995]: ERROR nova.compute.manager updated_port = self._update_port( [ 557.329573] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 557.329573] env[61995]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 557.330113] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 557.330113] env[61995]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 557.330113] env[61995]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b5b283c3-8a4b-4e77-827e-2748774f1e0f, please check neutron logs for more information. [ 557.330113] env[61995]: ERROR nova.compute.manager [ 557.330113] env[61995]: Traceback (most recent call last): [ 557.330113] env[61995]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 557.330113] env[61995]: listener.cb(fileno) [ 557.330113] env[61995]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 557.330113] env[61995]: result = function(*args, **kwargs) [ 557.330113] env[61995]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 557.330113] env[61995]: return func(*args, **kwargs) [ 557.330113] env[61995]: File "/opt/stack/nova/nova/compute/manager.py", line 2020, in _allocate_network_async [ 557.330113] env[61995]: raise e [ 557.330113] env[61995]: File "/opt/stack/nova/nova/compute/manager.py", line 1998, in _allocate_network_async [ 557.330113] env[61995]: nwinfo = self.network_api.allocate_for_instance( [ 557.330113] env[61995]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 557.330113] env[61995]: created_port_ids = self._update_ports_for_instance( [ 557.330113] env[61995]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 557.330113] env[61995]: with excutils.save_and_reraise_exception(): [ 557.330113] env[61995]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 557.330113] env[61995]: self.force_reraise() [ 557.330113] env[61995]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 557.330113] env[61995]: raise self.value [ 557.330113] env[61995]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 557.330113] env[61995]: updated_port = self._update_port( [ 557.330113] env[61995]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 557.330113] env[61995]: _ensure_no_port_binding_failure(port) [ 557.330113] env[61995]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 557.330113] env[61995]: raise exception.PortBindingFailed(port_id=port['id']) [ 557.331306] env[61995]: nova.exception.PortBindingFailed: Binding failed for port b5b283c3-8a4b-4e77-827e-2748774f1e0f, please check neutron logs for more information. [ 557.331306] env[61995]: Removing descriptor: 18 [ 557.331306] env[61995]: ERROR nova.compute.manager [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b5b283c3-8a4b-4e77-827e-2748774f1e0f, please check neutron logs for more information. 
[ 557.331306] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] Traceback (most recent call last): [ 557.331306] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] File "/opt/stack/nova/nova/compute/manager.py", line 2903, in _build_resources [ 557.331306] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] yield resources [ 557.331306] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] File "/opt/stack/nova/nova/compute/manager.py", line 2650, in _build_and_run_instance [ 557.331306] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] self.driver.spawn(context, instance, image_meta, [ 557.331306] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 557.331306] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 557.331306] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 557.331306] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] vm_ref = self.build_virtual_machine(instance, [ 557.331866] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 557.331866] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] vif_infos = vmwarevif.get_vif_info(self._session, [ 557.331866] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 557.331866] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] for vif in network_info: [ 557.331866] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 557.331866] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] return self._sync_wrapper(fn, *args, **kwargs) [ 557.331866] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 557.331866] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] self.wait() [ 557.331866] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 557.331866] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] self[:] = self._gt.wait() [ 557.331866] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 557.331866] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] return self._exit_event.wait() [ 557.331866] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 557.332267] env[61995]: ERROR 
nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] result = hub.switch() [ 557.332267] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 557.332267] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] return self.greenlet.switch() [ 557.332267] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 557.332267] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] result = function(*args, **kwargs) [ 557.332267] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 557.332267] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] return func(*args, **kwargs) [ 557.332267] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] File "/opt/stack/nova/nova/compute/manager.py", line 2020, in _allocate_network_async [ 557.332267] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] raise e [ 557.332267] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] File "/opt/stack/nova/nova/compute/manager.py", line 1998, in _allocate_network_async [ 557.332267] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] nwinfo = self.network_api.allocate_for_instance( [ 557.332267] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 557.332267] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] created_port_ids = self._update_ports_for_instance( [ 557.332630] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 557.332630] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] with excutils.save_and_reraise_exception(): [ 557.332630] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 557.332630] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] self.force_reraise() [ 557.332630] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 557.332630] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] raise self.value [ 557.332630] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 557.332630] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] updated_port = self._update_port( [ 557.332630] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 557.332630] 
env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] _ensure_no_port_binding_failure(port) [ 557.332630] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 557.332630] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] raise exception.PortBindingFailed(port_id=port['id']) [ 557.333001] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] nova.exception.PortBindingFailed: Binding failed for port b5b283c3-8a4b-4e77-827e-2748774f1e0f, please check neutron logs for more information. [ 557.333001] env[61995]: ERROR nova.compute.manager [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] [ 557.333001] env[61995]: INFO nova.compute.manager [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] Terminating instance [ 557.333001] env[61995]: DEBUG oslo_concurrency.lockutils [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] Acquiring lock "refresh_cache-6fffc743-caaa-4356-9406-6bdc6321aa1f" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 557.333165] env[61995]: DEBUG oslo_concurrency.lockutils [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] Acquired lock "refresh_cache-6fffc743-caaa-4356-9406-6bdc6321aa1f" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 557.334288] env[61995]: DEBUG nova.network.neutron [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] Building network info cache for instance {{(pid=61995) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 557.334288] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] Expecting reply to msg 8e435535fa074ae5bddafc0503795ec4 in queue reply_757213bc08bb49dab178826d88b76f40 [ 557.344791] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8e435535fa074ae5bddafc0503795ec4 [ 557.382486] env[61995]: DEBUG nova.compute.manager [None req-18b86b01-7dfe-4991-8317-17cae69a93a6 tempest-TenantUsagesTestJSON-1961309574 tempest-TenantUsagesTestJSON-1961309574-project-member] [instance: bd326bfa-d15a-4ce5-b4f6-6738aae0f60c] Starting instance... 
{{(pid=61995) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 557.384397] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-18b86b01-7dfe-4991-8317-17cae69a93a6 tempest-TenantUsagesTestJSON-1961309574 tempest-TenantUsagesTestJSON-1961309574-project-member] Expecting reply to msg 90193d9f013c4fc5a44832978cfb4d86 in queue reply_757213bc08bb49dab178826d88b76f40 [ 557.440230] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 90193d9f013c4fc5a44832978cfb4d86 [ 557.566119] env[61995]: DEBUG nova.compute.manager [None req-9ace5a90-e467-428a-82b2-7a1c680dfdf4 tempest-ServersAdminNegativeTestJSON-2047713534 tempest-ServersAdminNegativeTestJSON-2047713534-project-member] [instance: aa885305-7cdb-44d3-9bd2-880eb042a63e] Start building block device mappings for instance. {{(pid=61995) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 557.567758] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-9ace5a90-e467-428a-82b2-7a1c680dfdf4 tempest-ServersAdminNegativeTestJSON-2047713534 tempest-ServersAdminNegativeTestJSON-2047713534-project-member] Expecting reply to msg 66c2f686e09c4d3eb52eba2a8790ab1c in queue reply_757213bc08bb49dab178826d88b76f40 [ 557.637514] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 66c2f686e09c4d3eb52eba2a8790ab1c [ 557.727128] env[61995]: DEBUG oslo_vmware.api [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Task: {'id': session[5258be39-d7e5-2e06-99ec-0419db7e1a3e]5277ed17-84ad-d5ca-c542-3686df3568dd, 'name': SearchDatastore_Task, 'duration_secs': 0.009214} completed successfully. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 557.727128] env[61995]: DEBUG oslo_concurrency.lockutils [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/947125f0-9664-40eb-953e-b1373b076c9f" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 557.727128] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Processing image 947125f0-9664-40eb-953e-b1373b076c9f {{(pid=61995) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 557.727936] env[61995]: DEBUG oslo_concurrency.lockutils [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/947125f0-9664-40eb-953e-b1373b076c9f/947125f0-9664-40eb-953e-b1373b076c9f.vmdk" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 557.727936] env[61995]: DEBUG oslo_concurrency.lockutils [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/947125f0-9664-40eb-953e-b1373b076c9f/947125f0-9664-40eb-953e-b1373b076c9f.vmdk" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 557.727936] env[61995]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61995) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 557.728325] env[61995]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3b83b0f2-b1d4-44cc-b4e0-370786cc5484 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.748244] env[61995]: DEBUG nova.virt.vmwareapi.ds_util [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61995) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 557.748721] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Folder [datastore1] devstack-image-cache_base created. {{(pid=61995) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 557.756526] env[61995]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e238d579-b643-42c5-88f2-d413951d50a9 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.759167] env[61995]: DEBUG oslo_concurrency.lockutils [None req-90423ba7-0d04-40d8-b1bc-2365113f342c tempest-DeleteServersTestJSON-1313527082 tempest-DeleteServersTestJSON-1313527082-project-member] Acquiring lock "eff3a330-b4b9-486f-b26b-e3556d08a3b8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 557.759649] env[61995]: DEBUG oslo_concurrency.lockutils [None req-90423ba7-0d04-40d8-b1bc-2365113f342c tempest-DeleteServersTestJSON-1313527082 tempest-DeleteServersTestJSON-1313527082-project-member] Lock "eff3a330-b4b9-486f-b26b-e3556d08a3b8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 557.763269] env[61995]: DEBUG oslo_vmware.api [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Waiting for the task: (returnval){ [ 557.763269] env[61995]: value = "session[5258be39-d7e5-2e06-99ec-0419db7e1a3e]52481867-9303-b007-5b04-76e74ec09470" [ 557.763269] env[61995]: _type = "Task" [ 557.763269] env[61995]: } to complete. {{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 557.773545] env[61995]: DEBUG oslo_vmware.api [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Task: {'id': session[5258be39-d7e5-2e06-99ec-0419db7e1a3e]52481867-9303-b007-5b04-76e74ec09470, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 557.785682] env[61995]: INFO nova.scheduler.client.report [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Deleted allocations for instance cfffe8e0-4074-41c9-8e1b-49d621fc3c1b [ 557.797700] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Expecting reply to msg d9f254c47ed340ce8b0ed549ab202450 in queue reply_757213bc08bb49dab178826d88b76f40 [ 557.812806] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d9f254c47ed340ce8b0ed549ab202450 [ 557.821087] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c176ae2-85a7-438f-82a5-2a7ef9ac9918 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.830597] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1da09af-2f83-4d48-bded-ce538d4562f2 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.872909] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-759fb28b-dbc3-4540-bca7-2180571c8083 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.881957] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c21214d7-0112-471d-afca-c945aa7e5a0f {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.905256] env[61995]: DEBUG nova.compute.provider_tree [None req-c2ead0ea-6b64-4408-9280-a0773cf9692e tempest-ServersWithSpecificFlavorTestJSON-889640241 tempest-ServersWithSpecificFlavorTestJSON-889640241-project-member] Inventory has not changed in ProviderTree for provider: c68e615f-c0c6-4278-bb7b-a579e68878a7 {{(pid=61995) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 557.905858] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-c2ead0ea-6b64-4408-9280-a0773cf9692e tempest-ServersWithSpecificFlavorTestJSON-889640241 tempest-ServersWithSpecificFlavorTestJSON-889640241-project-member] Expecting reply to msg abde43830a714eda80304efdb80eb3dd in queue reply_757213bc08bb49dab178826d88b76f40 [ 557.907332] env[61995]: DEBUG nova.network.neutron [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] Instance cache missing network info. 
{{(pid=61995) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 557.909792] env[61995]: DEBUG oslo_concurrency.lockutils [None req-18b86b01-7dfe-4991-8317-17cae69a93a6 tempest-TenantUsagesTestJSON-1961309574 tempest-TenantUsagesTestJSON-1961309574-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 557.914694] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg abde43830a714eda80304efdb80eb3dd [ 558.072799] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-9ace5a90-e467-428a-82b2-7a1c680dfdf4 tempest-ServersAdminNegativeTestJSON-2047713534 tempest-ServersAdminNegativeTestJSON-2047713534-project-member] Expecting reply to msg c8244607d0194a8eb16306b95fd625d3 in queue reply_757213bc08bb49dab178826d88b76f40 [ 558.097801] env[61995]: DEBUG nova.network.neutron [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] Updating instance_info_cache with network_info: [] {{(pid=61995) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 558.098301] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] Expecting reply to msg cc61c845d0e74c29ac3229eb9b219627 in queue reply_757213bc08bb49dab178826d88b76f40 [ 558.111242] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c8244607d0194a8eb16306b95fd625d3 [ 558.113375] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cc61c845d0e74c29ac3229eb9b219627 [ 558.276429] env[61995]: DEBUG oslo_vmware.api [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Task: {'id': session[5258be39-d7e5-2e06-99ec-0419db7e1a3e]52481867-9303-b007-5b04-76e74ec09470, 'name': SearchDatastore_Task, 'duration_secs': 0.008897} completed successfully. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 558.277305] env[61995]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d8f4c54-6807-4bdd-96bb-8ea096db27ce {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.283009] env[61995]: DEBUG oslo_vmware.api [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Waiting for the task: (returnval){ [ 558.283009] env[61995]: value = "session[5258be39-d7e5-2e06-99ec-0419db7e1a3e]5290255f-dfa7-8d84-4496-6b77c2b14cfe" [ 558.283009] env[61995]: _type = "Task" [ 558.283009] env[61995]: } to complete. {{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 558.292318] env[61995]: DEBUG oslo_vmware.api [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Task: {'id': session[5258be39-d7e5-2e06-99ec-0419db7e1a3e]5290255f-dfa7-8d84-4496-6b77c2b14cfe, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 558.293539] env[61995]: DEBUG nova.network.neutron [None req-9ace5a90-e467-428a-82b2-7a1c680dfdf4 tempest-ServersAdminNegativeTestJSON-2047713534 tempest-ServersAdminNegativeTestJSON-2047713534-project-member] [instance: aa885305-7cdb-44d3-9bd2-880eb042a63e] Successfully created port: 5621f56f-bdf9-4b99-9198-47b096dab679 {{(pid=61995) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 558.301179] env[61995]: DEBUG oslo_concurrency.lockutils [None req-b5402465-5749-4221-a8d2-1de6379c27b6 tempest-VolumesAdminNegativeTest-869695280 tempest-VolumesAdminNegativeTest-869695280-project-member] Lock "cfffe8e0-4074-41c9-8e1b-49d621fc3c1b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.185s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 558.301813] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-90423ba7-0d04-40d8-b1bc-2365113f342c tempest-DeleteServersTestJSON-1313527082 tempest-DeleteServersTestJSON-1313527082-project-member] Expecting reply to msg 15594a2069e5407c8f7599a0d0c2dd81 in queue reply_757213bc08bb49dab178826d88b76f40 [ 558.316137] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 15594a2069e5407c8f7599a0d0c2dd81 [ 558.411871] env[61995]: DEBUG nova.scheduler.client.report [None req-c2ead0ea-6b64-4408-9280-a0773cf9692e tempest-ServersWithSpecificFlavorTestJSON-889640241 tempest-ServersWithSpecificFlavorTestJSON-889640241-project-member] Inventory has not changed for provider c68e615f-c0c6-4278-bb7b-a579e68878a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61995) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 558.414301] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-c2ead0ea-6b64-4408-9280-a0773cf9692e tempest-ServersWithSpecificFlavorTestJSON-889640241 tempest-ServersWithSpecificFlavorTestJSON-889640241-project-member] Expecting reply to msg 7901e4a0250a42ed8613b833553dbe8b in queue reply_757213bc08bb49dab178826d88b76f40 [ 558.431766] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7901e4a0250a42ed8613b833553dbe8b [ 558.575978] env[61995]: DEBUG nova.compute.manager [None req-9ace5a90-e467-428a-82b2-7a1c680dfdf4 tempest-ServersAdminNegativeTestJSON-2047713534 tempest-ServersAdminNegativeTestJSON-2047713534-project-member] [instance: aa885305-7cdb-44d3-9bd2-880eb042a63e] Start spawning the instance on the hypervisor. 
{{(pid=61995) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 558.604755] env[61995]: DEBUG nova.virt.hardware [None req-9ace5a90-e467-428a-82b2-7a1c680dfdf4 tempest-ServersAdminNegativeTestJSON-2047713534 tempest-ServersAdminNegativeTestJSON-2047713534-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-06T07:57:28Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-06T07:57:11Z,direct_url=,disk_format='vmdk',id=947125f0-9664-40eb-953e-b1373b076c9f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3dc1e6434ef44fd0be01fd4ebf2b62ef',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-06T07:57:12Z,virtual_size=,visibility=), allow threads: False {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 558.605147] env[61995]: DEBUG nova.virt.hardware [None req-9ace5a90-e467-428a-82b2-7a1c680dfdf4 tempest-ServersAdminNegativeTestJSON-2047713534 tempest-ServersAdminNegativeTestJSON-2047713534-project-member] Flavor limits 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 558.605391] env[61995]: DEBUG nova.virt.hardware [None req-9ace5a90-e467-428a-82b2-7a1c680dfdf4 tempest-ServersAdminNegativeTestJSON-2047713534 tempest-ServersAdminNegativeTestJSON-2047713534-project-member] Image limits 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 558.605673] env[61995]: DEBUG nova.virt.hardware [None req-9ace5a90-e467-428a-82b2-7a1c680dfdf4 tempest-ServersAdminNegativeTestJSON-2047713534 tempest-ServersAdminNegativeTestJSON-2047713534-project-member] Flavor pref 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 558.605998] env[61995]: DEBUG nova.virt.hardware [None req-9ace5a90-e467-428a-82b2-7a1c680dfdf4 tempest-ServersAdminNegativeTestJSON-2047713534 tempest-ServersAdminNegativeTestJSON-2047713534-project-member] Image pref 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 558.606129] env[61995]: DEBUG nova.virt.hardware [None req-9ace5a90-e467-428a-82b2-7a1c680dfdf4 tempest-ServersAdminNegativeTestJSON-2047713534 tempest-ServersAdminNegativeTestJSON-2047713534-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 558.606641] env[61995]: DEBUG nova.virt.hardware [None req-9ace5a90-e467-428a-82b2-7a1c680dfdf4 tempest-ServersAdminNegativeTestJSON-2047713534 tempest-ServersAdminNegativeTestJSON-2047713534-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 558.606721] env[61995]: DEBUG nova.virt.hardware [None req-9ace5a90-e467-428a-82b2-7a1c680dfdf4 tempest-ServersAdminNegativeTestJSON-2047713534 tempest-ServersAdminNegativeTestJSON-2047713534-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61995) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 558.606996] env[61995]: DEBUG nova.virt.hardware [None req-9ace5a90-e467-428a-82b2-7a1c680dfdf4 tempest-ServersAdminNegativeTestJSON-2047713534 tempest-ServersAdminNegativeTestJSON-2047713534-project-member] Got 1 possible topologies {{(pid=61995) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 558.607590] env[61995]: DEBUG nova.virt.hardware [None req-9ace5a90-e467-428a-82b2-7a1c680dfdf4 tempest-ServersAdminNegativeTestJSON-2047713534 tempest-ServersAdminNegativeTestJSON-2047713534-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 558.607957] env[61995]: DEBUG nova.virt.hardware [None req-9ace5a90-e467-428a-82b2-7a1c680dfdf4 tempest-ServersAdminNegativeTestJSON-2047713534 tempest-ServersAdminNegativeTestJSON-2047713534-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 558.608694] env[61995]: DEBUG oslo_concurrency.lockutils [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] Releasing lock "refresh_cache-6fffc743-caaa-4356-9406-6bdc6321aa1f" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 558.609194] env[61995]: DEBUG nova.compute.manager [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] Start destroying the instance on the hypervisor. 
{{(pid=61995) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 558.609491] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] Destroying instance {{(pid=61995) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 558.611827] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49fed155-7900-42b1-a303-e33bc181ff63 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.615614] env[61995]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-858e7401-9f6b-4eee-9059-da64a0f47e2a {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.644923] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-145a3164-c98d-4a8d-bcd7-f66362103937 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.651682] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2701c655-f9ec-4ed9-bf92-47b2e3fa8c53 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.683621] env[61995]: WARNING nova.virt.vmwareapi.vmops [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6fffc743-caaa-4356-9406-6bdc6321aa1f could not be found. [ 558.683881] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] Instance destroyed {{(pid=61995) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 558.684141] env[61995]: INFO nova.compute.manager [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] Took 0.07 seconds to destroy the instance on the hypervisor. [ 558.684522] env[61995]: DEBUG oslo.service.loopingcall [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61995) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 558.684795] env[61995]: DEBUG nova.compute.manager [-] [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] Deallocating network for instance {{(pid=61995) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2298}} [ 558.684888] env[61995]: DEBUG nova.network.neutron [-] [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] deallocate_for_instance() {{(pid=61995) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 558.705832] env[61995]: DEBUG nova.network.neutron [-] [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] Instance cache missing network info. {{(pid=61995) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 558.706292] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 77593bdd26ba410dbbea3289c25b4d0a in queue reply_757213bc08bb49dab178826d88b76f40 [ 558.714168] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 77593bdd26ba410dbbea3289c25b4d0a [ 558.761187] env[61995]: DEBUG nova.compute.manager [req-a76e065a-6944-4276-abbd-5b6c46c5cdec req-9c4f72be-6b8a-47cc-bde1-220d62bb78f3 service nova] [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] Received event network-changed-b5b283c3-8a4b-4e77-827e-2748774f1e0f {{(pid=61995) external_instance_event /opt/stack/nova/nova/compute/manager.py:11144}} [ 558.761401] env[61995]: DEBUG nova.compute.manager [req-a76e065a-6944-4276-abbd-5b6c46c5cdec req-9c4f72be-6b8a-47cc-bde1-220d62bb78f3 service nova] [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] Refreshing instance network info cache due to event network-changed-b5b283c3-8a4b-4e77-827e-2748774f1e0f. {{(pid=61995) external_instance_event /opt/stack/nova/nova/compute/manager.py:11149}} [ 558.761640] env[61995]: DEBUG oslo_concurrency.lockutils [req-a76e065a-6944-4276-abbd-5b6c46c5cdec req-9c4f72be-6b8a-47cc-bde1-220d62bb78f3 service nova] Acquiring lock "refresh_cache-6fffc743-caaa-4356-9406-6bdc6321aa1f" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 558.761777] env[61995]: DEBUG oslo_concurrency.lockutils [req-a76e065a-6944-4276-abbd-5b6c46c5cdec req-9c4f72be-6b8a-47cc-bde1-220d62bb78f3 service nova] Acquired lock "refresh_cache-6fffc743-caaa-4356-9406-6bdc6321aa1f" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 558.761948] env[61995]: DEBUG nova.network.neutron [req-a76e065a-6944-4276-abbd-5b6c46c5cdec req-9c4f72be-6b8a-47cc-bde1-220d62bb78f3 service nova] [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] Refreshing network info cache for port b5b283c3-8a4b-4e77-827e-2748774f1e0f {{(pid=61995) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 558.762873] env[61995]: INFO oslo_messaging._drivers.amqpdriver [req-a76e065a-6944-4276-abbd-5b6c46c5cdec req-9c4f72be-6b8a-47cc-bde1-220d62bb78f3 service nova] Expecting reply to msg e9ab35a3b7534de6a145dbe013b44ee5 in queue reply_757213bc08bb49dab178826d88b76f40 [ 558.771198] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e9ab35a3b7534de6a145dbe013b44ee5 [ 558.796309] env[61995]: DEBUG oslo_vmware.api [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Task: {'id': session[5258be39-d7e5-2e06-99ec-0419db7e1a3e]5290255f-dfa7-8d84-4496-6b77c2b14cfe, 'name': SearchDatastore_Task, 
'duration_secs': 0.010228} completed successfully. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 558.796613] env[61995]: DEBUG oslo_concurrency.lockutils [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/947125f0-9664-40eb-953e-b1373b076c9f/947125f0-9664-40eb-953e-b1373b076c9f.vmdk" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 558.796874] env[61995]: DEBUG nova.virt.vmwareapi.vm_util [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Copying Virtual Disk [datastore1] devstack-image-cache_base/947125f0-9664-40eb-953e-b1373b076c9f/947125f0-9664-40eb-953e-b1373b076c9f.vmdk to [datastore1] 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62/1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62.vmdk {{(pid=61995) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 558.797152] env[61995]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8960b70b-2bfa-40a6-a634-6a6386ea2f3d {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.808099] env[61995]: DEBUG nova.compute.manager [None req-90423ba7-0d04-40d8-b1bc-2365113f342c tempest-DeleteServersTestJSON-1313527082 tempest-DeleteServersTestJSON-1313527082-project-member] [instance: eff3a330-b4b9-486f-b26b-e3556d08a3b8] Starting instance... {{(pid=61995) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 558.810458] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-90423ba7-0d04-40d8-b1bc-2365113f342c tempest-DeleteServersTestJSON-1313527082 tempest-DeleteServersTestJSON-1313527082-project-member] Expecting reply to msg a15a302c52424f35afe19a422f32483e in queue reply_757213bc08bb49dab178826d88b76f40 [ 558.812256] env[61995]: DEBUG oslo_vmware.api [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Waiting for the task: (returnval){ [ 558.812256] env[61995]: value = "task-378107" [ 558.812256] env[61995]: _type = "Task" [ 558.812256] env[61995]: } to complete. {{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 558.821717] env[61995]: DEBUG oslo_vmware.api [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Task: {'id': task-378107, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 558.863848] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a15a302c52424f35afe19a422f32483e [ 558.917359] env[61995]: DEBUG oslo_concurrency.lockutils [None req-c2ead0ea-6b64-4408-9280-a0773cf9692e tempest-ServersWithSpecificFlavorTestJSON-889640241 tempest-ServersWithSpecificFlavorTestJSON-889640241-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.376s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 558.919509] env[61995]: DEBUG nova.compute.manager [None req-c2ead0ea-6b64-4408-9280-a0773cf9692e tempest-ServersWithSpecificFlavorTestJSON-889640241 tempest-ServersWithSpecificFlavorTestJSON-889640241-project-member] [instance: afe9a3ca-2235-4a2b-9b9f-7988a77f3c2d] Start building networks asynchronously for instance. {{(pid=61995) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 558.919814] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-c2ead0ea-6b64-4408-9280-a0773cf9692e tempest-ServersWithSpecificFlavorTestJSON-889640241 tempest-ServersWithSpecificFlavorTestJSON-889640241-project-member] Expecting reply to msg 8dd1abe9fc9a4c65be58e13bd1ab8a07 in queue reply_757213bc08bb49dab178826d88b76f40 [ 558.923413] env[61995]: DEBUG oslo_concurrency.lockutils [None req-8622c767-54c8-41f6-a2f5-ea1693fb87d7 tempest-ServersAdminTestJSON-252110596 tempest-ServersAdminTestJSON-252110596-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.551s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 558.925036] env[61995]: INFO nova.compute.claims [None req-8622c767-54c8-41f6-a2f5-ea1693fb87d7 tempest-ServersAdminTestJSON-252110596 tempest-ServersAdminTestJSON-252110596-project-member] [instance: 7cf953f7-4afd-4a20-86b6-96a662b6139f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 558.927188] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-8622c767-54c8-41f6-a2f5-ea1693fb87d7 tempest-ServersAdminTestJSON-252110596 tempest-ServersAdminTestJSON-252110596-project-member] Expecting reply to msg b0857e14385f4bccb4fa8efc9fab81e3 in queue reply_757213bc08bb49dab178826d88b76f40 [ 558.980245] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8dd1abe9fc9a4c65be58e13bd1ab8a07 [ 558.980245] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b0857e14385f4bccb4fa8efc9fab81e3 [ 559.208695] env[61995]: DEBUG nova.network.neutron [-] [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] Updating instance_info_cache with network_info: [] {{(pid=61995) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 559.209278] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 33814777968a497480d49f1646d04a7f in queue reply_757213bc08bb49dab178826d88b76f40 [ 559.219520] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 33814777968a497480d49f1646d04a7f [ 559.293688] env[61995]: DEBUG nova.network.neutron [req-a76e065a-6944-4276-abbd-5b6c46c5cdec req-9c4f72be-6b8a-47cc-bde1-220d62bb78f3 service nova] [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] Instance cache missing network info. 
{{(pid=61995) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 559.325826] env[61995]: DEBUG oslo_vmware.api [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Task: {'id': task-378107, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 559.335769] env[61995]: DEBUG oslo_concurrency.lockutils [None req-90423ba7-0d04-40d8-b1bc-2365113f342c tempest-DeleteServersTestJSON-1313527082 tempest-DeleteServersTestJSON-1313527082-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 559.431671] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-8622c767-54c8-41f6-a2f5-ea1693fb87d7 tempest-ServersAdminTestJSON-252110596 tempest-ServersAdminTestJSON-252110596-project-member] Expecting reply to msg a13c78a010594c7b8d9251029e0aac58 in queue reply_757213bc08bb49dab178826d88b76f40 [ 559.433198] env[61995]: DEBUG nova.compute.utils [None req-c2ead0ea-6b64-4408-9280-a0773cf9692e tempest-ServersWithSpecificFlavorTestJSON-889640241 tempest-ServersWithSpecificFlavorTestJSON-889640241-project-member] Using /dev/sd instead of None {{(pid=61995) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 559.433814] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-c2ead0ea-6b64-4408-9280-a0773cf9692e tempest-ServersWithSpecificFlavorTestJSON-889640241 tempest-ServersWithSpecificFlavorTestJSON-889640241-project-member] Expecting reply to msg 77c18e00855f430ebb5dc8c6a7210a1f in queue reply_757213bc08bb49dab178826d88b76f40 [ 559.434567] env[61995]: DEBUG nova.compute.manager [None req-c2ead0ea-6b64-4408-9280-a0773cf9692e tempest-ServersWithSpecificFlavorTestJSON-889640241 tempest-ServersWithSpecificFlavorTestJSON-889640241-project-member] [instance: afe9a3ca-2235-4a2b-9b9f-7988a77f3c2d] Allocating IP information in the background. 
{{(pid=61995) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1990}} [ 559.434721] env[61995]: DEBUG nova.network.neutron [None req-c2ead0ea-6b64-4408-9280-a0773cf9692e tempest-ServersWithSpecificFlavorTestJSON-889640241 tempest-ServersWithSpecificFlavorTestJSON-889640241-project-member] [instance: afe9a3ca-2235-4a2b-9b9f-7988a77f3c2d] allocate_for_instance() {{(pid=61995) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 559.446126] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a13c78a010594c7b8d9251029e0aac58 [ 559.449118] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 77c18e00855f430ebb5dc8c6a7210a1f [ 559.550821] env[61995]: DEBUG nova.network.neutron [req-a76e065a-6944-4276-abbd-5b6c46c5cdec req-9c4f72be-6b8a-47cc-bde1-220d62bb78f3 service nova] [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] Updating instance_info_cache with network_info: [] {{(pid=61995) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 559.551437] env[61995]: INFO oslo_messaging._drivers.amqpdriver [req-a76e065a-6944-4276-abbd-5b6c46c5cdec req-9c4f72be-6b8a-47cc-bde1-220d62bb78f3 service nova] Expecting reply to msg eb2bc4eb654542b0a292a93cdcb53e97 in queue reply_757213bc08bb49dab178826d88b76f40 [ 559.558049] env[61995]: DEBUG nova.policy [None req-c2ead0ea-6b64-4408-9280-a0773cf9692e tempest-ServersWithSpecificFlavorTestJSON-889640241 tempest-ServersWithSpecificFlavorTestJSON-889640241-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '07028098a97f4e4da501bb4b3e3c100e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'be60b38a081f4074aa82b393977002fd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61995) authorize /opt/stack/nova/nova/policy.py:203}} [ 559.563201] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eb2bc4eb654542b0a292a93cdcb53e97 [ 559.711265] env[61995]: INFO nova.compute.manager [-] [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] Took 1.03 seconds to deallocate network for instance. [ 559.713746] env[61995]: DEBUG nova.compute.claims [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] [instance: 6fffc743-caaa-4356-9406-6bdc6321aa1f] Aborting claim: {{(pid=61995) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 559.713961] env[61995]: DEBUG oslo_concurrency.lockutils [None req-95ce5374-e6a2-4567-a19e-2479f98bd091 tempest-VolumesAssistedSnapshotsTest-926918021 tempest-VolumesAssistedSnapshotsTest-926918021-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 559.826842] env[61995]: DEBUG oslo_vmware.api [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Task: {'id': task-378107, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.617328} completed successfully. 
{{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 559.827133] env[61995]: DEBUG nova.virt.vmwareapi.vm_util [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Copied Virtual Disk [datastore1] devstack-image-cache_base/947125f0-9664-40eb-953e-b1373b076c9f/947125f0-9664-40eb-953e-b1373b076c9f.vmdk to [datastore1] 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62/1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62.vmdk {{(pid=61995) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 559.827316] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Extending root virtual disk to 1048576 {{(pid=61995) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 559.827556] env[61995]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ebbae8c1-83d0-462f-8dd8-eeb76d05c2cc {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.835488] env[61995]: DEBUG oslo_vmware.api [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Waiting for the task: (returnval){ [ 559.835488] env[61995]: value = "task-378108" [ 559.835488] env[61995]: _type = "Task" [ 559.835488] env[61995]: } to complete. {{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 559.843727] env[61995]: DEBUG oslo_vmware.api [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Task: {'id': task-378108, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 559.941532] env[61995]: DEBUG nova.compute.manager [None req-c2ead0ea-6b64-4408-9280-a0773cf9692e tempest-ServersWithSpecificFlavorTestJSON-889640241 tempest-ServersWithSpecificFlavorTestJSON-889640241-project-member] [instance: afe9a3ca-2235-4a2b-9b9f-7988a77f3c2d] Start building block device mappings for instance. 
{{(pid=61995) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 559.943445] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-c2ead0ea-6b64-4408-9280-a0773cf9692e tempest-ServersWithSpecificFlavorTestJSON-889640241 tempest-ServersWithSpecificFlavorTestJSON-889640241-project-member] Expecting reply to msg 985c8a73fc1a4ecdbbfea9ab8c75e996 in queue reply_757213bc08bb49dab178826d88b76f40 [ 559.997087] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 985c8a73fc1a4ecdbbfea9ab8c75e996 [ 560.061440] env[61995]: DEBUG oslo_concurrency.lockutils [req-a76e065a-6944-4276-abbd-5b6c46c5cdec req-9c4f72be-6b8a-47cc-bde1-220d62bb78f3 service nova] Releasing lock "refresh_cache-6fffc743-caaa-4356-9406-6bdc6321aa1f" {{(pid=61995) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 560.171982] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccd2e56e-c7ae-4b68-970e-4cc9b0cd78bb {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.179668] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02de1dd5-82b1-491d-a3f9-e87095142f3c {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.227820] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c887600-4fe6-4dd8-ab17-cb3e9d474465 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.237348] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9971224f-40a8-4952-9689-9781ff12bd5f {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.270558] env[61995]: DEBUG nova.compute.provider_tree [None req-8622c767-54c8-41f6-a2f5-ea1693fb87d7 tempest-ServersAdminTestJSON-252110596 tempest-ServersAdminTestJSON-252110596-project-member] Inventory has not changed in ProviderTree for provider: c68e615f-c0c6-4278-bb7b-a579e68878a7 {{(pid=61995) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 560.271116] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-8622c767-54c8-41f6-a2f5-ea1693fb87d7 tempest-ServersAdminTestJSON-252110596 tempest-ServersAdminTestJSON-252110596-project-member] Expecting reply to msg 54a1fad0e2304f3c90a85f6def6edebe in queue reply_757213bc08bb49dab178826d88b76f40 [ 560.286481] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 54a1fad0e2304f3c90a85f6def6edebe [ 560.346021] env[61995]: DEBUG oslo_vmware.api [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Task: {'id': task-378108, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065952} completed successfully. 
{{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 560.346283] env[61995]: DEBUG nova.virt.vmwareapi.vmops [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Extended root virtual disk {{(pid=61995) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 560.347057] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e5587d4-8bd0-4647-a243-7ac31e1f0061 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.370061] env[61995]: DEBUG nova.virt.vmwareapi.volumeops [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Reconfiguring VM instance instance-00000001 to attach disk [datastore1] 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62/1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62.vmdk or device None with type sparse {{(pid=61995) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 560.370384] env[61995]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bfc661d5-681f-4b67-88a5-f559840b1772 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.395460] env[61995]: DEBUG oslo_vmware.api [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Waiting for the task: (returnval){ [ 560.395460] env[61995]: value = "task-378109" [ 560.395460] env[61995]: _type = "Task" [ 560.395460] env[61995]: } to complete. {{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 560.404130] env[61995]: DEBUG oslo_vmware.api [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Task: {'id': task-378109, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 560.456697] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-c2ead0ea-6b64-4408-9280-a0773cf9692e tempest-ServersWithSpecificFlavorTestJSON-889640241 tempest-ServersWithSpecificFlavorTestJSON-889640241-project-member] Expecting reply to msg e69d81dae60e42ae802eb0446ce35573 in queue reply_757213bc08bb49dab178826d88b76f40 [ 560.501776] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e69d81dae60e42ae802eb0446ce35573 [ 560.780865] env[61995]: DEBUG nova.scheduler.client.report [None req-8622c767-54c8-41f6-a2f5-ea1693fb87d7 tempest-ServersAdminTestJSON-252110596 tempest-ServersAdminTestJSON-252110596-project-member] Inventory has not changed for provider c68e615f-c0c6-4278-bb7b-a579e68878a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61995) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 560.780865] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-8622c767-54c8-41f6-a2f5-ea1693fb87d7 tempest-ServersAdminTestJSON-252110596 tempest-ServersAdminTestJSON-252110596-project-member] Expecting reply to msg 98f0d48cb49d48dea4105c09f70716c1 in queue reply_757213bc08bb49dab178826d88b76f40 [ 560.797711] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 98f0d48cb49d48dea4105c09f70716c1 [ 560.828871] env[61995]: DEBUG nova.network.neutron [None req-c2ead0ea-6b64-4408-9280-a0773cf9692e tempest-ServersWithSpecificFlavorTestJSON-889640241 tempest-ServersWithSpecificFlavorTestJSON-889640241-project-member] [instance: afe9a3ca-2235-4a2b-9b9f-7988a77f3c2d] Successfully created port: 8f16e047-310a-453b-a60e-32d3695648a7 {{(pid=61995) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 560.906256] env[61995]: DEBUG oslo_vmware.api [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Task: {'id': task-378109, 'name': ReconfigVM_Task, 'duration_secs': 0.269483} completed successfully. 
{{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 560.906718] env[61995]: DEBUG nova.virt.vmwareapi.volumeops [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] [instance: 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62] Reconfigured VM instance instance-00000001 to attach disk [datastore1] 1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62/1bf7f97f-ccfe-4ee0-a6fb-2234769c4b62.vmdk or device None with type sparse {{(pid=61995) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 560.907567] env[61995]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-044811e7-d72a-49cb-9917-b15aeb1a8c96 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.927440] env[61995]: DEBUG oslo_vmware.api [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Waiting for the task: (returnval){ [ 560.927440] env[61995]: value = "task-378111" [ 560.927440] env[61995]: _type = "Task" [ 560.927440] env[61995]: } to complete. {{(pid=61995) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 560.940621] env[61995]: DEBUG oslo_vmware.api [None req-78466350-68e0-4e98-859f-aac3601dd53c tempest-ServersAdmin275Test-481182624 tempest-ServersAdmin275Test-481182624-project-admin] Task: {'id': task-378111, 'name': Rename_Task} progress is 5%. {{(pid=61995) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 560.964385] env[61995]: DEBUG nova.compute.manager [None req-c2ead0ea-6b64-4408-9280-a0773cf9692e tempest-ServersWithSpecificFlavorTestJSON-889640241 tempest-ServersWithSpecificFlavorTestJSON-889640241-project-member] [instance: afe9a3ca-2235-4a2b-9b9f-7988a77f3c2d] Start spawning the instance on the hypervisor. 
{{(pid=61995) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 561.008669] env[61995]: DEBUG nova.virt.hardware [None req-c2ead0ea-6b64-4408-9280-a0773cf9692e tempest-ServersWithSpecificFlavorTestJSON-889640241 tempest-ServersWithSpecificFlavorTestJSON-889640241-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-06T07:59:18Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='1082964790',id=20,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-1295339992',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-06T07:57:11Z,direct_url=,disk_format='vmdk',id=947125f0-9664-40eb-953e-b1373b076c9f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3dc1e6434ef44fd0be01fd4ebf2b62ef',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-06T07:57:12Z,virtual_size=,visibility=), allow threads: False {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 561.008975] env[61995]: DEBUG nova.virt.hardware [None req-c2ead0ea-6b64-4408-9280-a0773cf9692e tempest-ServersWithSpecificFlavorTestJSON-889640241 tempest-ServersWithSpecificFlavorTestJSON-889640241-project-member] Flavor limits 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 561.009261] env[61995]: DEBUG nova.virt.hardware [None req-c2ead0ea-6b64-4408-9280-a0773cf9692e tempest-ServersWithSpecificFlavorTestJSON-889640241 tempest-ServersWithSpecificFlavorTestJSON-889640241-project-member] Image limits 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 561.009365] env[61995]: DEBUG nova.virt.hardware [None req-c2ead0ea-6b64-4408-9280-a0773cf9692e tempest-ServersWithSpecificFlavorTestJSON-889640241 tempest-ServersWithSpecificFlavorTestJSON-889640241-project-member] Flavor pref 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 561.009507] env[61995]: DEBUG nova.virt.hardware [None req-c2ead0ea-6b64-4408-9280-a0773cf9692e tempest-ServersWithSpecificFlavorTestJSON-889640241 tempest-ServersWithSpecificFlavorTestJSON-889640241-project-member] Image pref 0:0:0 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 561.009649] env[61995]: DEBUG nova.virt.hardware [None req-c2ead0ea-6b64-4408-9280-a0773cf9692e tempest-ServersWithSpecificFlavorTestJSON-889640241 tempest-ServersWithSpecificFlavorTestJSON-889640241-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61995) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 561.009881] env[61995]: DEBUG nova.virt.hardware [None req-c2ead0ea-6b64-4408-9280-a0773cf9692e tempest-ServersWithSpecificFlavorTestJSON-889640241 tempest-ServersWithSpecificFlavorTestJSON-889640241-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 561.010049] env[61995]: DEBUG nova.virt.hardware [None req-c2ead0ea-6b64-4408-9280-a0773cf9692e tempest-ServersWithSpecificFlavorTestJSON-889640241 
tempest-ServersWithSpecificFlavorTestJSON-889640241-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61995) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 561.010213] env[61995]: DEBUG nova.virt.hardware [None req-c2ead0ea-6b64-4408-9280-a0773cf9692e tempest-ServersWithSpecificFlavorTestJSON-889640241 tempest-ServersWithSpecificFlavorTestJSON-889640241-project-member] Got 1 possible topologies {{(pid=61995) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 561.010370] env[61995]: DEBUG nova.virt.hardware [None req-c2ead0ea-6b64-4408-9280-a0773cf9692e tempest-ServersWithSpecificFlavorTestJSON-889640241 tempest-ServersWithSpecificFlavorTestJSON-889640241-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 561.010535] env[61995]: DEBUG nova.virt.hardware [None req-c2ead0ea-6b64-4408-9280-a0773cf9692e tempest-ServersWithSpecificFlavorTestJSON-889640241 tempest-ServersWithSpecificFlavorTestJSON-889640241-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61995) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 561.011458] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbf805fd-9b17-4726-ac98-b87c2173ca4e {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.019071] env[61995]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c77c76e-dc9c-467e-b84e-5b8110a44327 {{(pid=61995) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.282515] env[61995]: DEBUG oslo_concurrency.lockutils [None req-8622c767-54c8-41f6-a2f5-ea1693fb87d7 tempest-ServersAdminTestJSON-252110596 tempest-ServersAdminTestJSON-252110596-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.359s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 561.282810] env[61995]: DEBUG nova.compute.manager [None req-8622c767-54c8-41f6-a2f5-ea1693fb87d7 tempest-ServersAdminTestJSON-252110596 tempest-ServersAdminTestJSON-252110596-project-member] [instance: 7cf953f7-4afd-4a20-86b6-96a662b6139f] Start building networks asynchronously for instance. 
{{(pid=61995) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 561.284657] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-8622c767-54c8-41f6-a2f5-ea1693fb87d7 tempest-ServersAdminTestJSON-252110596 tempest-ServersAdminTestJSON-252110596-project-member] Expecting reply to msg 29446395211a4a458a9d8b0e9e44d047 in queue reply_757213bc08bb49dab178826d88b76f40 [ 561.285717] env[61995]: DEBUG oslo_concurrency.lockutils [None req-f05e4bca-5463-449c-bcd7-bd61934970a5 tempest-AttachInterfacesTestJSON-1933428740 tempest-AttachInterfacesTestJSON-1933428740-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.393s {{(pid=61995) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 561.287159] env[61995]: INFO nova.compute.claims [None req-f05e4bca-5463-449c-bcd7-bd61934970a5 tempest-AttachInterfacesTestJSON-1933428740 tempest-AttachInterfacesTestJSON-1933428740-project-member] [instance: 4efc40b0-7ac3-4455-be40-4480240f1ae6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 561.288778] env[61995]: INFO oslo_messaging._drivers.amqpdriver [None req-f05e4bca-5463-449c-bcd7-bd61934970a5 tempest-AttachInterfacesTestJSON-1933428740 tempest-AttachInterfacesTestJSON-1933428740-project-member] Expecting reply to msg 99d16b6a3383467489b9ab6ea52b99ad in queue reply_757213bc08bb49dab178826d88b76f40 [ 561.332494] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 29446395211a4a458a9d8b0e9e44d047 [ 561.333009] env[61995]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 99d16b6a3383467489b9ab6ea52b99ad [ 561.414446] env[61995]: ERROR nova.compute.manager [None req-9ace5a90-e467-428a-82b2-7a1c680dfdf4 tempest-ServersAdminNegativeTestJSON-2047713534 tempest-ServersAdminNegativeTestJSON-2047713534-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 5621f56f-bdf9-4b99-9198-47b096dab679, please check neutron logs for more information. 
[ 561.414446] env[61995]: ERROR nova.compute.manager Traceback (most recent call last): [ 561.414446] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1998, in _allocate_network_async [ 561.414446] env[61995]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 561.414446] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 561.414446] env[61995]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 561.414446] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 561.414446] env[61995]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 561.414446] env[61995]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 561.414446] env[61995]: ERROR nova.compute.manager self.force_reraise() [ 561.414446] env[61995]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 561.414446] env[61995]: ERROR nova.compute.manager raise self.value [ 561.414446] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 561.414446] env[61995]: ERROR nova.compute.manager updated_port = self._update_port( [ 561.414446] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 561.414446] env[61995]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 561.414951] env[61995]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 561.414951] env[61995]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 561.414951] env[61995]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 5621f56f-bdf9-4b99-9198-47b096dab679, please check neutron logs for more information. 
[ 561.414951] env[61995]: ERROR nova.compute.manager [ 561.414951] env[61995]: Traceback (most recent call last): [ 561.414951] env[61995]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 561.414951] env[61995]: listener.cb(fileno) [ 561.414951] env[61995]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 561.414951] env[61995]: result = function(*args, **kwargs) [ 561.414951] env[61995]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 561.414951] env[61995]: return func(*args, **kwargs) [ 561.414951] env[61995]: File "/opt/stack/nova/nova/compute/manager.py", line 2020, in _allocate_network_async [ 561.414951] env[61995]: raise e [ 561.414951] env[61995]: File "/opt/stack/nova/nova/compute/manager.py", line 1998, in _allocate_network_async [ 561.414951] env[61995]: nwinfo = self.network_api.allocate_for_instance( [ 561.414951] env[61995]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 561.414951] env[61995]: created_port_ids = self._update_ports_for_instance( [ 561.414951] env[61995]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 561.414951] env[61995]: with excutils.save_and_reraise_exception(): [ 561.414951] env[61995]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 561.414951] env[61995]: self.force_reraise() [ 561.414951] env[61995]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 561.414951] env[61995]: raise self.value [ 561.414951] env[61995]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 561.414951] env[61995]: updated_port = self._update_port( [ 561.414951] env[61995]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 561.414951] env[61995]: _ensure_no_port_binding_failure(port) [ 561.414951] env[61995]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 561.414951] env[61995]: raise exception.PortBindingFailed(port_id=port['id']) [ 561.415869] env[61995]: nova.exception.PortBindingFailed: Binding failed for port 5621f56f-bdf9-4b99-9198-47b096dab679, please check neutron logs for more information. [ 561.415869] env[61995]: Removing descriptor: 17 [ 561.415869] env[61995]: ERROR nova.compute.manager [None req-9ace5a90-e467-428a-82b2-7a1c680dfdf4 tempest-ServersAdminNegativeTestJSON-2047713534 tempest-ServersAdminNegativeTestJSON-2047713534-project-member] [instance: aa885305-7cdb-44d3-9bd2-880eb042a63e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 5621f56f-bdf9-4b99-9198-47b096dab679, please check neutron logs for more information. 
[ 561.415869] env[61995]: ERROR nova.compute.manager [instance: aa885305-7cdb-44d3-9bd2-880eb042a63e] Traceback (most recent call last): [ 561.415869] env[61995]: ERROR nova.compute.manager [instance: aa885305-7cdb-44d3-9bd2-880eb042a63e] File "/opt/stack/nova/nova/compute/manager.py", line 2903, in _build_resources [ 561.415869] env[61995]: ERROR nova.compute.manager [instance: aa885305-7cdb-44d3-9bd2-880eb042a63e] yield resources [ 561.415869] env[61995]: ERROR nova.compute.manager [instance: aa885305-7cdb-44d3-9bd2-880eb042a63e] File "/opt/stack/nova/nova/compute/manager.py", line 2650, in _build_and_run_instance [ 561.415869] env[61995]: ERROR nova.compute.manager [instance: aa885305-7cdb-44d3-9bd2-880eb042a63e] self.driver.spawn(context, instance, image_meta, [ 561.415869] env[61995]: ERROR nova.compute.manager [instance: aa885305-7cdb-44d3-9bd2-880eb042a63e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 561.415869] env[61995]: ERROR nova.compute.manager [instance: aa885305-7cdb-44d3-9bd2-880eb042a63e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 561.415869] env[61995]: ERROR nova.compute.manager [instance: aa885305-7cdb-44d3-9bd2-880eb042a63e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 561.415869] env[61995]: ERROR nova.compute.manager [instance: aa885305-7cdb-44d3-9bd2-880eb042a63e] vm_ref = self.build_virtual_machine(instance, [ 561.416272] env[61995]: ERROR nova.compute.manager [instance: aa885305-7cdb-44d3-9bd2-880eb042a63e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 561.416272] env[61995]: ERROR nova.compute.manager [instance: aa885305-7cdb-44d3-9bd2-880eb042a63e] vif_infos = vmwarevif.get_vif_info(self._session, [ 561.416272] env[61995]: ERROR nova.compute.manager [instance: aa885305-7cdb-44d3-9bd2-880eb042a63e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 561.416272] env[61995]: ERROR nova.compute.manager [instance: aa885305-7cdb-44d3-9bd2-880eb042a63e] for vif in network_info: [ 561.416272] env[61995]: ERROR nova.compute.manager [instance: aa885305-7cdb-44d3-9bd2-880eb042a63e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 561.416272] env[61995]: ERROR nova.compute.manager [instance: aa885305-7cdb-44d3-9bd2-880eb042a63e] return self._sync_wrapper(fn, *args, **kwargs) [ 561.416272] env[61995]: ERROR nova.compute.manager [instance: aa885305-7cdb-44d3-9bd2-880eb042a63e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 561.416272] env[61995]: ERROR nova.compute.manager [instance: aa885305-7cdb-44d3-9bd2-880eb042a63e] self.wait() [ 561.416272] env[61995]: ERROR nova.compute.manager [instance: aa885305-7cdb-44d3-9bd2-880eb042a63e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 561.416272] env[61995]: ERROR nova.compute.manager [instance: aa885305-7cdb-44d3-9bd2-880eb042a63e] self[:] = self._gt.wait() [ 561.416272] env[61995]: ERROR nova.compute.manager [instance: aa885305-7cdb-44d3-9bd2-880eb042a63e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 561.416272] env[61995]: ERROR nova.compute.manager [instance: aa885305-7cdb-44d3-9bd2-880eb042a63e] return self._exit_event.wait() [ 561.416272] env[61995]: ERROR nova.compute.manager [instance: aa885305-7cdb-44d3-9bd2-880eb042a63e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 561.416661] env[61995]: ERROR 
nova.compute.manager [instance: aa885305-7cdb-44d3-9bd2-880eb042a63e] result = hub.switch() [ 561.416661] env[61995]: ERROR nova.compute.manager [instance: aa885305-7cdb-44d3-9bd2-880eb042a63e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 561.416661] env[61995]: ERROR nova.compute.manager [instance: aa885305-7cdb-44d3-9bd2-880eb042a63e] return self.greenlet.switch() [ 561.416661] env[61995]: ERROR nova.compute.manager [instance: aa885305-7cdb-44d3-9bd2-880eb042a63e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 561.416661] env[61995]: ERROR nova.compute.manager [instance: aa885305-7cdb-44d3-9bd2-880eb042a63e] result = function(*args, **kwargs) [ 561.416661] env[61995]: ERROR nova.compute.manager [instance: aa885305-7cdb-44d3-9bd2-880eb042a63e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 561.416661] env[61995]: ERROR nova.compute.manager [instance: aa885305-7cdb-44d3-9bd2-880eb042a63e] return func(*args, **kwargs) [ 561.416661] env[61995]: ERROR nova.compute.manager [instance: aa885305-7cdb-44d3-9bd2-880eb042a63e] File "/opt/stack/nova/nova/compute/manager.py", line 2020, in _allocate_network_async [ 561.416661] env[61995]: ERROR nova.compute.manager [instance: aa885305-7cdb-44d3-9